author     Karl Berry <karl@freefriends.org>    2015-12-11 23:16:34 +0000
committer  Karl Berry <karl@freefriends.org>    2015-12-11 23:16:34 +0000
commit     71945bfdf73e58a09c05155c6d49729528cac3f7 (patch)
tree       aa614024e79d15166a1aa500e223c748b1a66c83
parent     fa3dbdbd11bbc7da0c06e2871472419998e645c9 (diff)
luaotfload (11dec15)
git-svn-id: svn://tug.org/texlive/trunk@39078 c570f23f-e606-0410-a88d-b1316a301751
-rwxr-xr-x  Build/source/texk/texlive/linked_scripts/luaotfload/luaotfload-tool.lua  389
-rw-r--r--  Master/texmf-dist/doc/luatex/luaotfload/NEWS  19
-rw-r--r--  Master/texmf-dist/doc/luatex/luaotfload/README  6
-rw-r--r--  Master/texmf-dist/doc/luatex/luaotfload/filegraph.pdf  bin 45642 -> 60168 bytes
-rw-r--r--  Master/texmf-dist/doc/luatex/luaotfload/luaotfload.pdf  bin 170452 -> 194494 bytes
-rw-r--r--  Master/texmf-dist/doc/man/man1/luaotfload-tool.1  16
-rw-r--r--  Master/texmf-dist/doc/man/man1/luaotfload-tool.man1.pdf  bin 18400 -> 44933 bytes
-rw-r--r--  Master/texmf-dist/doc/man/man5/luaotfload.conf.5  71
-rw-r--r--  Master/texmf-dist/doc/man/man5/luaotfload.conf.man5.pdf  bin 18117 -> 0 bytes
-rwxr-xr-x  Master/texmf-dist/scripts/luaotfload/luaotfload-tool.lua  389
-rwxr-xr-x  Master/texmf-dist/scripts/luaotfload/mkcharacters  12
-rwxr-xr-x  Master/texmf-dist/scripts/luaotfload/mkglyphlist  2
-rwxr-xr-x  Master/texmf-dist/scripts/luaotfload/mkimport  871
-rwxr-xr-x  Master/texmf-dist/scripts/luaotfload/mkstatus  174
-rwxr-xr-x  Master/texmf-dist/scripts/luaotfload/mktests  362
-rw-r--r--  Master/texmf-dist/source/luatex/luaotfload/Makefile  58
-rw-r--r--  Master/texmf-dist/source/luatex/luaotfload/filegraph.dot  176
-rw-r--r--  Master/texmf-dist/source/luatex/luaotfload/luaotfload-latex.tex  96
-rw-r--r--  Master/texmf-dist/source/luatex/luaotfload/luaotfload-main.tex  364
-rw-r--r--  Master/texmf-dist/source/luatex/luaotfload/luaotfload-tool.rst  15
-rw-r--r--  Master/texmf-dist/source/luatex/luaotfload/luaotfload.conf.rst  59
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-2015-12-09.lua  11552
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics-gen.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-gen.lua)  14
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics-nod.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-nod.lua)  111
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics.tex  93
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-data-con.lua  138
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afk.lua  200
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afm.lua  1047
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-cid.lua  177
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-con.lua  1448
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-def.lua  454
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-ini.lua  32
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-map.lua  441
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otb.lua  707
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otf.lua  3052
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-oti.lua  91
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otp.lua  909
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-tfm.lua  198
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-cbk.lua  220
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-def.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-def.lua)  0
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-demo-vf-1.lua  44
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-enc.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-enc.lua)  7
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ext.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-ext.lua)  0
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-inj.lua  1152
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-lua.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-lua.lua)  0
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ota.lua  459
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-otn.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-otn.lua)  2778
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-syn.lua  106
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-tfm.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-tfm.lua)  0
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.lua  270
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.tex  140
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-boolean.lua  69
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-file.lua  698
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-function.lua  11
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-io.lua  358
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lpeg.lua  1173
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lua.lua  192
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-math.lua  38
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-string.lua  213
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-table.lua  1265
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.lua  45
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.tex  17
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.lua  53
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.tex  1874
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.lua  591
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.tex  140
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-plain.tex  53
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor-test.tex  30
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.lua  163
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.tex  14
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-reference.lua  16456
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.lua  25
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.tex  11
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.lua  62
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.tex  20
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-test.tex  134
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-tl2014.lua (renamed from Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fontloader.lua)  0
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/fontloader-util-str.lua  1134
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua  154
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-characters.lua  7780
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua  352
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-configuration.lua  337
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua  605
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-diagnostics.lua  47
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua  83
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua  68
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-inj.lua  526
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-init.lua  573
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-letterspace.lua  299
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua  181
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-log.lua  55
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-main.lua  790
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua  52
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-package.lua  99
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-parsers.lua  132
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-resolvers.lua  254
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-status.lua  99
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty  21
98 files changed, 62693 insertions, 3572 deletions
diff --git a/Build/source/texk/texlive/linked_scripts/luaotfload/luaotfload-tool.lua b/Build/source/texk/texlive/linked_scripts/luaotfload/luaotfload-tool.lua
index 8a65298d03a..69b6c976fb1 100755
--- a/Build/source/texk/texlive/linked_scripts/luaotfload/luaotfload-tool.lua
+++ b/Build/source/texk/texlive/linked_scripts/luaotfload/luaotfload-tool.lua
@@ -2,15 +2,13 @@
-----------------------------------------------------------------------
-- FILE: luaotfload-tool.lua
-- DESCRIPTION: database functionality
--- REQUIREMENTS: luaotfload 2.5
+-- REQUIREMENTS: luaotfload 2.6
-- AUTHOR: Khaled Hosny, Élie Roux, Philipp Gesang
--- VERSION: 2.5
-- LICENSE: GPL v2.0
--- MODIFIED: 2014-08-10 23:13:18+0200
-----------------------------------------------------------------------
luaotfload = luaotfload or { }
-local version = "2.5-4" --- <int: major>.<int: minor>-<int: fixes>
+local version = "2.6"
luaotfload.version = version
luaotfload.self = "luaotfload-tool"
@@ -76,21 +74,13 @@ else -- 5.2
runtime = { "stock", _VERSION }
end
-
local C, Ct, P, S = lpeg.C, lpeg.Ct, lpeg.P, lpeg.S
local lpegmatch = lpeg.match
-local loader_file = "luatexbase.loader.lua"
-local loader_path = assert(kpsefind_file(loader_file, "lua"),
- "File '"..loader_file.."' not found")
-
-
string.quoted = string.quoted or function (str)
return string.format("%q",str)
end
-require (loader_path)
-
--[[doc--
XXX:
@@ -109,6 +99,7 @@ config.lualibs.prefer_merged = true
config.lualibs.load_extended = true
require "lualibs"
+
local iosavedata = io.savedata
local lfsisdir = lfs.isdir
local lfsisfile = lfs.isfile
@@ -138,22 +129,69 @@ local backup = {
}
texio.write, texio.write_nl = dummy_function, dummy_function
-require"luaotfload-basics-gen.lua"
+require "fontloader-basics-gen.lua"
texio.write, texio.write_nl = backup.write, backup.write_nl
utilities = backup.utilities
-require "luaotfload-log.lua" --- this populates the luaotfload.log.* namespace
-require "luaotfload-parsers" --- fonts.conf, configuration, and request syntax
-require "luaotfload-configuration" --- configuration file handling
-require "luaotfload-database"
+fonts = { names = { } } -- for db; normally provided by the fontloaders
+
+local require_init = { }
+
+local loadmodule = function (name)
+ local v = require ("luaotfload-" .. name)
+ if v then
+ local mod = { }
+ local tv = type (v)
+ if tv == "table" then
+ mod.name = name
+ mod.init = v.init
+ require_init [#require_init + 1] = mod
+ elseif tv == "function" then
+ mod.name = name
+ mod.init = v
+ require_init [#require_init + 1] = mod
+ end
+ end
+end
+
require "alt_getopt"
-local names = fonts.names
-local sanitize_fontname = names.sanitize_fontname
+loadmodule "log.lua" --- this populates the luaotfload.log.* namespace
+loadmodule "parsers" --- fonts.conf, configuration, and request syntax
+loadmodule "configuration" --- configuration file handling
+loadmodule "database"
+loadmodule "resolvers" --- Font lookup
+
+local logreport
+
+local init_modules = function ()
+ --- NB we don’t command the logger at this point.
+ local todo = #require_init
+ local ret = true
+ for i = 1, todo do
+ local mod = require_init[i]
+ local name = mod.name
+ local init = mod.init
+ if type (init) ~= "function" then
+ error ("luaotfload broken; module "
+ .. name .. " missing initializers!")
+ end
+ local v = mod.init ()
+ if v == true then
+ --- evaluated well
+ elseif type (v) == "table" then
+ luaotfload[name] = v
+ else
+ error ("luaotfload broken; initialization of module "
+ .. name .. " returned " .. tostring (v) .. ".")
+ return false
+ end
+ end
+ logreport = luaotfload.log.report
+ return ret
+end
-local log = luaotfload.log
-local report = log.report
local help_messages = {
["luaotfload-tool"] = [[
@@ -170,8 +208,7 @@ Usage: %s [OPTIONS...]
-q --quiet don't output anything
-v --verbose=LEVEL be more verbose (print the searched directories)
- -vv print the loaded fonts
- -vvv print all steps of directory searching
+ -v, -vv .. -vvvvvvvvv set loglevel in unary
--log=stdout redirect log output to stdout
-V --version print version and exit
@@ -191,7 +228,7 @@ Usage: %s [OPTIONS...]
-c --no-compress do not gzip index file (text version only)
-l --flush-lookups empty lookup cache of font requests
-D --dry-run skip loading of fonts, just scan
- --formats=[+|-]EXTENSIONS set, add, or subtract formats to index
+ --formats=[+|-]EXTENSIONS set, add, or subtract file formats
-p --prefer-texmf prefer fonts in the TEXMF over system fonts
--max-fonts=N process at most N font files
@@ -259,7 +296,7 @@ local help_msg = function (version)
luaotfload.self,
names_gzip,
names_bin,
- caches.getwritablepath (config.luaotfload.cache_dir)))
+ caches.getwritablepath (config.luaotfload.paths.cache_dir, "")))
end
local about = [[
@@ -272,7 +309,7 @@ local about = [[
local version_msg = function ( )
local out = function (...) texiowrite_nl (stringformat (...)) end
local uname = os.uname ()
- local meta = names.getmetadata ()
+ local meta = fonts.names.getmetadata ()
out (about, luaotfload.self)
out ("%s version: %q", luaotfload.self, version)
out ("Revision: %q", config.luaotfload.status.notes.revision)
@@ -288,10 +325,14 @@ local version_msg = function ( )
local var = uname_vars[i]
out (" + %8s: %s", var, uname[var])
end
- out ("Index: version=%q created=%q modified=%q",
- config.luaotfload.status.notes.revision,
- meta.created or "ages ago",
- meta.modified or "ages ago")
+ if meta == false then
+ out("No database metadata available.")
+ else
+ out ("Index: version=%q created=%q modified=%q",
+ config.luaotfload.status.notes.revision,
+ meta.created or "ages ago",
+ meta.modified or "ages ago")
+ end
out ""
end
@@ -669,7 +710,7 @@ subfont_by_name = function (lst, askedname, n)
local font = lst[n]
if font then
- if sanitize_fontname (font.fullname) == askedname then
+ if fonts.names.sanitize_fontname (font.fullname) == askedname then
return font
end
return subfont_by_name (lst, askedname, n+1)
@@ -686,10 +727,10 @@ The font info knows two levels of detail:
--doc]]--
local show_font_info = function (basename, askedname, detail, warnings)
- local filenames = names.data().files
+ local filenames = fonts.names.data().files
local index = filenames.base[basename]
local fullname = filenames.full[index]
- askedname = sanitize_fontname (askedname)
+ askedname = fonts.names.sanitize_fontname (askedname)
if not fullname then -- texmf
fullname = resolvers.findfile(basename)
end
@@ -699,9 +740,9 @@ local show_font_info = function (basename, askedname, detail, warnings)
if nfonts > 0 then -- true type collection
local subfont
if askedname then
- report (true, 1, "resolve",
- [[%s is part of the font collection %s]],
- askedname, basename)
+ logreport (true, 1, "resolve",
+ [[%s is part of the font collection %s]],
+ askedname, basename)
subfont = subfont_by_name(shortinfo, askedname)
end
if subfont then
@@ -710,11 +751,11 @@ local show_font_info = function (basename, askedname, detail, warnings)
show_full_info(fullname, subfont, warnings)
end
else -- list all subfonts
- report (true, 1, "resolve",
- [[%s is a font collection]], basename)
+ logreport (true, 1, "resolve",
+ [[%s is a font collection]], basename)
for subfont = 1, nfonts do
- report (true, 1, "resolve",
- [[Showing info for font no. %d]], n)
+ logreport (true, 1, "resolve",
+ [[Showing info for font no. %d]], n)
show_info_items(shortinfo[subfont])
if detail == true then
show_full_info(fullname, subfont, warnings)
@@ -728,7 +769,7 @@ local show_font_info = function (basename, askedname, detail, warnings)
end
end
else
- report (true, 1, "resolve", "Font %s not found", filename)
+ logreport (true, 1, "resolve", "Font %s not found", filename)
end
end
@@ -739,9 +780,10 @@ set.
--]]--
local action_sequence = {
- "config", "loglevel", "help", "version",
- "diagnose", "blacklist", "cache", "flush",
- "bisect", "generate", "list", "query",
+ "config" , "loglevel" , "help" , "version" ,
+ "dumpconf" , "diagnose" , "blacklist" , "cache" ,
+ "flush" , "bisect" , "generate" , "list" ,
+ "query" ,
}
local action_pending = tabletohash(action_sequence, false)
@@ -755,9 +797,9 @@ local actions = { } --- (jobspec -> (bool * bool)) list
actions.loglevel = function (job)
local lvl = job.log_level
if lvl then
- log.set_loglevel(lvl)
- report ("info", 3, "util", "Setting the log level to %d.", lvl)
- report ("log", 2, "util", "Lua=%q", _VERSION)
+ luaotfload.log.set_loglevel(lvl)
+ logreport ("info", 3, "util", "Setting the log level to %d.", lvl)
+ logreport ("log", 2, "util", "Lua=%q", _VERSION)
end
return true, true
end
@@ -781,25 +823,32 @@ actions.version = function (job)
return true, false
end
+actions.dumpconf = function (job)
+ config.actions.dump ()
+ return true, false
+end
+
actions.help = function (job)
help_msg (job.help_version or "luaotfload-tool")
return true, false
end
actions.blacklist = function (job)
- names.read_blacklist()
+ fonts.names.read_blacklist()
local n = 0
- for n, entry in next, tablesortedkeys(names.blacklist) do
+ for n, entry in next, tablesortedkeys(fonts.names.blacklist) do
iowrite (stringformat("(%d %s)\n", n, entry))
end
return true, false
end
actions.generate = function (job)
- local _ = names.update (fontnames, job.force_reload, job.dry_run)
- local namedata = names.data ()
+ local _ = fonts.names.update (fontnames, job.force_reload, job.dry_run)
+ local namedata = fonts.names.data ()
if namedata then
- report ("info", 2, "db", "Fonts in the database: %i", #namedata.mappings)
+ logreport ("info", 2, "db",
+ "Fonts in the database: %i",
+ #namedata.mappings)
return true, true
end
return false, false
@@ -809,7 +858,7 @@ end
--- bisect mode
-------------------------------------------------------------------------------
-local bisect_status_path = caches.getwritablepath "bisect"
+local bisect_status_path = caches.getwritablepath ("bisect", "")
local bisect_status_file = bisect_status_path .."/" .. "luaotfload-bisect-status.lua"
local bisect_status_fmt = [[
--[==[-------------------------------------------------------------------------
@@ -835,12 +884,14 @@ local write_bisect_status = function (data)
osdate ("%Y-%m-d %H:%M:%S", os.time ()),
payload)
if status and iosavedata (bisect_status_file, status) then
- report ("info", 4, "bisect",
- "Bisection state written to %s.", bisect_status_file)
+ logreport ("info", 4, "bisect",
+ "Bisection state written to %s.",
+ bisect_status_file)
return true
end
- report ("info", 0, "bisect",
- "Failed to write bisection state to %s.", bisect_status_file)
+ logreport ("info", 0, "bisect",
+ "Failed to write bisection state to %s.",
+ bisect_status_file)
return false
end
@@ -852,16 +903,22 @@ end
--- unit -> state list
local read_bisect_status = function ()
- report ("info", 4, "bisect", "Testing for status file: %q.", bisect_status_file)
+ logreport ("info", 4, "bisect",
+ "Testing for status file: %q.",
+ bisect_status_file)
if not lfsisfile (bisect_status_file) then
- report ("info", 2, "bisect", "No such file: %q.", bisect_status_file)
- report ("info", 0, "bisect", "Not in bisect mode.")
+ logreport ("info", 2, "bisect",
+ "No such file: %q.", bisect_status_file)
+ logreport ("info", 0, "bisect",
+ "Not in bisect mode.")
return false
end
- report ("info", 4, "bisect", "Reading status file: %q.", bisect_status_file)
+ logreport ("info", 4, "bisect",
+ "Reading status file: %q.", bisect_status_file)
local success, status = pcall (dofile, bisect_status_file)
if not success then
- report ("info", 0, "bisect", "Could not read status file.")
+ logreport ("info", 0, "bisect",
+ "Could not read status file.")
return false
end
return status
@@ -876,19 +933,21 @@ end
local bisect_start = function ()
if lfsisfile (bisect_status_file) then
- report ("info", 0, "bisect",
- "Bisect session in progress.",
- bisect_status_file)
- report ("info", 0, "bisect",
- "Use --bisect=stop to erase it before starting over.")
+ logreport ("info", 0, "bisect",
+ "Bisect session in progress.",
+ bisect_status_file)
+ logreport ("info", 0, "bisect",
+ "Use --bisect=stop to erase it before starting over.")
return false, false
end
- report ("info", 2, "bisect",
- "Starting bisection of font database %q.", bisect_status_file)
- local n = names.count_font_files ()
+ logreport ("info", 2, "bisect",
+ "Starting bisection of font database %q.",
+ bisect_status_file)
+ local n = fonts.names.count_font_files ()
local pivot = mathfloor (n / 2)
local data = { { 1, n, pivot } }
- report ("info", 0, "bisect", "Initializing pivot to %d.", pivot)
+ logreport ("info", 0, "bisect",
+ "Initializing pivot to %d.", pivot)
if write_bisect_status (data) then
return true, false
end
@@ -902,21 +961,23 @@ end
--doc]]--
local bisect_stop = function ()
- report ("info", 3, "bisect", "Erasing bisection state at %s.", bisect_status_file)
+ logreport ("info", 3, "bisect",
+ "Erasing bisection state at %s.",
+ bisect_status_file)
if lfsisfile (bisect_status_file) then
local success, msg = os.remove (bisect_status_file)
if not success then
- report ("info", 2, "bisect",
- "Failed to erase file %s (%s).",
- bisect_status_file, msg)
+ logreport ("info", 2, "bisect",
+ "Failed to erase file %s (%s).",
+ bisect_status_file, msg)
end
end
if lfsisdir (bisect_status_path) then
local success, msg = os.remove (bisect_status_path)
if not success then
- report ("info", 2, "bisect",
- "Failed to erase directory %s (%s).",
- bisect_status_path, msg)
+ logreport ("info", 2, "bisect",
+ "Failed to erase directory %s (%s).",
+ bisect_status_path, msg)
end
end
if lfsisfile (bisect_status_file) then
@@ -933,12 +994,12 @@ end
--doc]]--
local bisect_terminate = function (nsteps, culprit)
- report ("info", 1, "bisect",
- "Bisection completed after %d steps.", nsteps)
- report ("info", 0, "bisect",
- "Bad file: %s.", names.nth_font_filename (culprit))
- report ("info", 0, "bisect",
- "Run with --bisect=stop to finish bisection.")
+ logreport ("info", 1, "bisect",
+ "Bisection completed after %d steps.", nsteps)
+ logreport ("info", 0, "bisect",
+ "Bad file: %s.", fonts.names.nth_font_filename (culprit))
+ logreport ("info", 0, "bisect",
+ "Run with --bisect=stop to finish bisection.")
return true, false
end
@@ -949,10 +1010,10 @@ end
--doc]]--
local list_remainder = function (lo, hi)
- local fonts = names.font_slice (lo, hi)
- report ("info", 0, "bisect", "%d fonts left.", hi - lo + 1)
+ local fonts = fonts.names.font_slice (lo, hi)
+ logreport ("info", 0, "bisect", "%d fonts left.", hi - lo + 1)
for i = 1, #fonts do
- report ("info", 1, "bisect", " · %2d: %s", lo, fonts[i])
+ logreport ("info", 1, "bisect", " · %2d: %s", lo, fonts[i])
lo = lo + 1
end
end
@@ -985,8 +1046,9 @@ local bisect_set = function (outcome)
local lo, hi, pivot = unpack (previous)
- report ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
- nsteps, lo, hi, pivot)
+ logreport ("info", 3, "bisect",
+ "Previous step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, lo, hi, pivot)
if outcome == "bad" then
hi = pivot
@@ -997,9 +1059,9 @@ local bisect_set = function (outcome)
return bisect_terminate (nsteps, lo)
end
pivot = mathfloor ((lo + hi) / 2)
- report ("info", 0, "bisect",
- "Continuing with the lower segment: lo=%d, hi=%d, pivot=%d.",
- lo, hi, pivot)
+ logreport ("info", 0, "bisect",
+ "Continuing with the lower segment: lo=%d, hi=%d, pivot=%d.",
+ lo, hi, pivot)
elseif outcome == "good" then
lo = pivot + 1
if lo >= hi then --- complete
@@ -1009,11 +1071,12 @@ local bisect_set = function (outcome)
return bisect_terminate (nsteps, lo)
end
pivot = mathfloor ((lo + hi) / 2)
- report ("info", 0, "bisect",
- "Continuing with the upper segment: lo=%d, hi=%d, pivot=%d.",
- lo, hi, pivot)
+ logreport ("info", 0, "bisect",
+ "Continuing with the upper segment: lo=%d, hi=%d, pivot=%d.",
+ lo, hi, pivot)
else -- can’t happen
- report ("info", 0, "bisect", "What the hell?", lo, hi, pivot)
+ logreport ("info", 0, "bisect",
+ "What the hell?", lo, hi, pivot)
return false, false
end
@@ -1040,13 +1103,13 @@ local bisect_status = function ()
if nsteps > 1 then
for i = nsteps - 1, 1, -1 do
local step = status[i]
- report ("info", 2, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
- i, unpack (step))
+ logreport ("info", 2, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
+ i, unpack (step))
end
end
local current = status[nsteps]
- report ("info", 0, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
- nsteps, unpack (current))
+ logreport ("info", 0, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, unpack (current))
return true, false
end
@@ -1072,10 +1135,10 @@ local bisect_run = function ()
current = status[nsteps - 1]
end
local lo, hi, pivot = unpack (current)
- report ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
- nsteps, lo, hi, pivot)
- report ("info", 1, "bisect", "Step %d: Testing fonts from %d to %d.",
- currentstep, lo, pivot)
+ logreport ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, lo, hi, pivot)
+ logreport ("info", 1, "bisect", "Step %d: Testing fonts from %d to %d.",
+ currentstep, lo, pivot)
config.luaotfload.misc.bisect = { lo, pivot }
return true, true
end
@@ -1093,35 +1156,38 @@ actions.bisect = function (job)
local mode = job.bisect
local runner = bisect_modes[mode]
if not runner then
- report ("info", 0, "bisect", "Unknown directive %q.", mode)
+ logreport ("info", 0, "bisect", "Unknown directive %q.", mode)
return false, false
end
return runner (job)
end
actions.flush = function (job)
- local success = names.flush_lookup_cache()
+ local success = fonts.names.flush_lookup_cache()
if success then
- local success = names.save_lookups()
+ local success = fonts.names.save_lookups()
if success then
- report ("info", 2, "cache", "Lookup cache emptied")
+ logreport ("info", 2, "cache", "Lookup cache emptied")
return true, true
end
end
return false, false
end
-local cache_directives = {
- ["purge"] = names.purge_cache,
- ["erase"] = names.erase_cache,
- ["show"] = names.show_cache,
-}
+local cache_directives = function ()
+ --- These exist only after initialization.
+ return {
+ ["purge"] = fonts.names.purge_cache,
+ ["erase"] = fonts.names.erase_cache,
+ ["show"] = fonts.names.show_cache,
+ }
+end
actions.cache = function (job)
- local directive = cache_directives[job.cache]
+ local directive = cache_directives()[job.cache]
if not directive or type(directive) ~= "function" then
- report ("info", 2, "cache",
- "Invalid font cache directive %s.", job.cache)
+ logreport ("info", 2, "cache",
+ "Invalid font cache directive %s.", job.cache)
return false, false
end
if directive() then
@@ -1144,7 +1210,7 @@ actions.query = function (job)
features = { },
}
- tmpspec = names.handle_request (tmpspec)
+ tmpspec = fonts.names.handle_request (tmpspec)
if not tmpspec.size then
tmpspec.size = 655360 --- assume 10pt
@@ -1155,38 +1221,38 @@ actions.query = function (job)
if tmpspec.lookup == "name"
or tmpspec.lookup == "anon" --- not *exactly* as resolvers.anon
then
- foundname, subfont = names.resolve_name (tmpspec)
+ foundname, _, success = fonts.names.lookup_font_name (tmpspec)
if foundname then
- foundname, _, success = names.font_file_lookup (foundname)
+ foundname, _, success = fonts.names.lookup_font_file (foundname)
end
elseif tmpspec.lookup == "file" then
- foundname, _, success =
- names.font_file_lookup (tmpspec.name)
+ foundname, _, success = fonts.names.lookup_font_file (tmpspec.name)
end
if success then
- report (false, 0, "resolve", "Font %q found!", query)
+ logreport (false, 0, "resolve", "Font %q found!", query)
if subfont then
- report (false, 0, "resolve",
- "Resolved file name %q, subfont nr. %q",
- foundname, subfont)
+ logreport (false, 0, "resolve",
+ "Resolved file name %q, subfont nr. %q",
+ foundname, subfont)
else
- report (false, 0, "resolve",
- "Resolved file name %q", foundname)
+ logreport (false, 0, "resolve",
+ "Resolved file name %q", foundname)
end
if job.show_info then
show_font_info (foundname, query, job.full_info, job.warnings)
iowrite "\n"
end
else
- report (false, 0, "resolve", "Cannot find %q in index.", query)
- report (false, 0, "resolve",
- "Hint: use the --fuzzy option to display suggestions.",
- query)
+ logreport (false, 0, "resolve", "Cannot find %q in index.", query)
if job.fuzzy == true then
- report (false, 0, "resolve",
- "Looking for close matches, this may take a while ...")
- local _success = names.find_closest(query, job.fuzzy_limit)
+ logreport (false, 0, "resolve",
+ "Looking for close matches, this may take a while ...")
+ local _success = fonts.names.find_closest(query, job.fuzzy_limit)
+ else
+ logreport (false, 0, "resolve",
+ "Hint: use the --fuzzy option to display suggestions.",
+ query)
end
end
return true, true
@@ -1259,14 +1325,13 @@ set_primary_field = function (fields, addme, acc, n)
return acc
end
-local splitcomma = luaotfload.parsers.splitcomma
-
actions.list = function (job)
local criterion = job.criterion
local asked_fields = job.asked_fields
- local name_index = names.data ()
+ local name_index = fonts.names.data ()
if asked_fields then
+ local splitcomma = luaotfload.parsers.splitcomma
asked_fields = lpegmatch(splitcomma, asked_fields)
end
@@ -1276,14 +1341,14 @@ actions.list = function (job)
end
if not name_index then
- name_index = names.load()
+ name_index = fonts.names.load()
end
local mappings = name_index.mappings
local nmappings = #mappings
if criterion == "*" then
- report (false, 1, "list", "All %d entries", nmappings)
+ logreport (false, 1, "list", "All %d entries", nmappings)
for i=1, nmappings do
local entry = mappings[i]
local fields = get_fields(entry, asked_fields)
@@ -1298,12 +1363,12 @@ actions.list = function (job)
criterion = criterion[1]
asked_fields = set_primary_field(asked_fields, criterion)
- report (false, 1, "list", "By %s", criterion)
+ logreport (false, 1, "list", "By %s", criterion)
--- firstly, build a list of fonts to operate on
local targets = { }
if asked_value then --- only those whose value matches
- report (false, 2, "list", "Restricting to value %s", asked_value)
+ logreport (false, 2, "list", "Restricting to value %s", asked_value)
for i=1, nmappings do
local entry = mappings[i]
if entry[criterion]
@@ -1348,7 +1413,7 @@ actions.list = function (job)
end
end
local ntargets = #targets
- report (false, 2, "list", "%d entries", ntargets)
+ logreport (false, 2, "list", "%d entries", ntargets)
--- now, output the collection
for i=1, ntargets do
@@ -1413,6 +1478,7 @@ local process_cmdline = function ( ) -- unit -> jobspec
cache = 1,
conf = 1,
diagnose = 1,
+ dumpconf = 0,
["dry-run"] = "D",
["flush-lookups"] = "l",
fields = 1,
@@ -1484,7 +1550,7 @@ local process_cmdline = function ( ) -- unit -> jobspec
elseif v == "log" then
local str = optarg[n]
if str then
- finalizers = log.set_logout(str, finalizers)
+ finalizers = luaotfload.log.set_logout(str, finalizers)
end
elseif v == "find" then
action_pending["query"] = true
@@ -1517,7 +1583,7 @@ local process_cmdline = function ( ) -- unit -> jobspec
elseif v == "D" then
result.dry_run = true
elseif v == "p" then
- names.set_location_precedence {
+ fonts.names.set_location_precedence {
"local", "texmf", "system"
}
elseif v == "b" then
@@ -1551,15 +1617,20 @@ local process_cmdline = function ( ) -- unit -> jobspec
result.bisect = optarg[n]
action_pending.bisect = true
elseif v == "conf" then
- local extra = stringexplode (optarg[n], ",+")
- if extra then
- local extra_config = result.extra_config
- if extra_config then
- table.append (extra_config, extra)
- else
- result.extra_config = extra
+ local confname = optarg[n]
+ if confname then
+ local extra = stringexplode (optarg[n], ",+")
+ if extra then
+ local extra_config = result.extra_config
+ if extra_config then
+ table.append (extra_config, extra)
+ else
+ result.extra_config = extra
+ end
end
end
+ elseif v == "dumpconf" then
+ action_pending["dumpconf"] = true
elseif v == "print-conf" then
result.print_config = true
end
@@ -1573,6 +1644,8 @@ local process_cmdline = function ( ) -- unit -> jobspec
end
local main = function ( ) -- unit -> int
+ if init_modules () == false then return -42 end
+
local retval = 0
local job = process_cmdline()
@@ -1583,23 +1656,23 @@ local main = function ( ) -- unit -> int
local actionname = action_sequence[i]
local exit = false
if action_pending[actionname] then
- report ("log", 3, "util", "Preparing for task", "%s", actionname)
+ logreport ("log", 3, "util", "Preparing for task", "%s", actionname)
local action = actions[actionname]
local success, continue = action(job)
if not success then
- report (false, 0, "util",
- "Failed to execute task.", "%s", actionname)
+ logreport (false, 0, "util",
+ "Failed to execute task.", "%s", actionname)
retval = -1
exit = true
elseif not continue then
- report (false, 3, "util",
- "Task completed, exiting.", "%s", actionname)
+ logreport (false, 3, "util",
+ "Task completed, exiting.", "%s", actionname)
exit = true
else
- report (false, 3, "util",
- "Task completed successfully.", "%s", actionname)
+ logreport (false, 3, "util",
+ "Task completed successfully.", "%s", actionname)
end
end
if exit then break end
diff --git a/Master/texmf-dist/doc/luatex/luaotfload/NEWS b/Master/texmf-dist/doc/luatex/luaotfload/NEWS
index c460f620675..c1e5000a01f 100644
--- a/Master/texmf-dist/doc/luatex/luaotfload/NEWS
+++ b/Master/texmf-dist/doc/luatex/luaotfload/NEWS
@@ -1,6 +1,25 @@
Change History
--------------
+2015/12/09, luaotfload v2.6
+ * Add ``sign`` target to makefile for automated package signing.
+ * Add ``--dumpconf`` option to luaotfload-tool for generating configuration
+ files.
+ * Move fontloader files to subtree src/fontloader.
+ * New script ``mkimport`` facilitates maintenance of code imported from
+ Context.
+ * Revised letterspacing, now utilizing the ``node.direct`` interface.
+ * Revised colorization of fonts, utilizing ``node.direct`` (Dohyun Kim).
+ * Colorization was moved to the ``post_linebreak_filter`` stage.
+ * Move remaining functionality from ``luaotfload-override`` into
+ initialization.
+ * Write names index if fonts were removed.
+ * Separate module loading from initialization.
+ * Custom fontloader package with the files from Lualibs removed.
+ * Swappable fontloader via configuration option.
+ * Lualibs are now a dependency when used in a TeX run as well.
+ * Respect interword spaces when letterspacing.
+
2014/07/13, luaotfload v2.5
* Remove legacy code.
* Remove compatibility with the old mkluatexfontdb script.
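
As a rough illustration of the "Separate module loading from initialization" item above (a sketch, not code from the patch): luaotfload-tool.lua now collects submodules through loadmodule and runs their initializers later from init_modules. A module conforming to that protocol could look roughly like the following Lua fragment; the module name "example" and its fields are hypothetical.

    -- Hypothetical "luaotfload-example.lua": requiring it only registers an
    -- initializer; the actual setup runs later, when init_modules () is called.
    local example = { }

    local function init ()
        -- do the real work here; returning a table exports it through the
        -- luaotfload namespace, returning true signals success without exports
        example.ready = true
        return example
    end

    return { init = init }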
diff --git a/Master/texmf-dist/doc/luatex/luaotfload/README b/Master/texmf-dist/doc/luatex/luaotfload/README
index 75575d2ddc6..71490b81763 100644
--- a/Master/texmf-dist/doc/luatex/luaotfload/README
+++ b/Master/texmf-dist/doc/luatex/luaotfload/README
@@ -33,6 +33,12 @@ Will Robertson <will.robertson@latex-project.org>
Philipp Gesang <philipp.gesang@alumni.uni-heidelberg.de>
Dohyun Kim <nomosnomos@gmail.com>
Reuben Thomas <https://github.com/rrthomas>
+Joseph Wright <joseph.wright@morningstar2.co.uk>
+Manuel Pégourié-Gonnard <mpg@elzevir.fr>
+Olof-Joachim Frahm <olof@macrolet.net>
+Patrick Gundlach <gundlach@speedata.de>
+Philipp Stephani <st_philipp@yahoo.de>
+David Carlisle <d.p.carlisle@gmail.com>
Installation
diff --git a/Master/texmf-dist/doc/luatex/luaotfload/filegraph.pdf b/Master/texmf-dist/doc/luatex/luaotfload/filegraph.pdf
index 50d4160abf2..d795f78cb29 100644
--- a/Master/texmf-dist/doc/luatex/luaotfload/filegraph.pdf
+++ b/Master/texmf-dist/doc/luatex/luaotfload/filegraph.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/luatex/luaotfload/luaotfload.pdf b/Master/texmf-dist/doc/luatex/luaotfload/luaotfload.pdf
index 90ee4617336..ee17939d22c 100644
--- a/Master/texmf-dist/doc/luatex/luaotfload/luaotfload.pdf
+++ b/Master/texmf-dist/doc/luatex/luaotfload/luaotfload.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man1/luaotfload-tool.1 b/Master/texmf-dist/doc/man/man1/luaotfload-tool.1
index dd8abe2999f..ae67fce3b10 100644
--- a/Master/texmf-dist/doc/man/man1/luaotfload-tool.1
+++ b/Master/texmf-dist/doc/man/man1/luaotfload-tool.1
@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
-.TH LUAOTFLOAD-TOOL 1 "2014-03-30" "2.5" "text processing"
+.TH LUAOTFLOAD-TOOL 1 "2015-12-09" "2.6" "text processing"
.SH NAME
luaotfload-tool \- generate and query the Luaotfload font names database
.
@@ -60,6 +60,8 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
\fBluaotfload\-tool\fP \-\-show\-blacklist
.sp
\fBluaotfload\-tool\fP \-\-diagnose=CHECK
+.sp
+\fBluaotfload\-tool\fP \-\-conf=FILE \-\-dumpconf
.SH DESCRIPTION
.sp
luaotfload\-tool accesses the font names database that is required by
@@ -386,6 +388,16 @@ information about it.
Procedures can be chained by concatenating with
commas, e.g. \fB\-\-diagnose=files,permissions\fP\&.
Specify \fBthorough\fP to run all checks.
+.TP
+.BI \-\-conf\fB= FILE
+Read the configuration from \fIFILE\fP\&. See
+\fBluaotfload.conf\fP(5) for documentation
+concerning the format and available options.
+.TP
+.B \-\-dumpconf
+Print the currently active configuration; the
+output can be saved to a file and used for
+bootstrapping a custom configuration file.
.UNINDENT
.SH FILES
.sp
@@ -404,7 +416,7 @@ Both kinds of files are safe to delete, at the cost of regenerating
them with the next run of \fILuaTeX\fP\&.
.SH SEE ALSO
.sp
-\fBluatex\fP (1), \fBlua\fP (1)
+\fBluaotfload.conf\fP(5), \fBluatex\fP(1), \fBlua\fP(1)
.INDENT 0.0
.IP \(bu 2
\fBtexdoc luaotfload\fP to display the manual for the \fILuaotfload\fP
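
For illustration (a sketch using only options documented in this man page, and assuming --conf applies to whatever action follows it): the active configuration can be dumped, adjusted, and then used to drive an index update. The file name my-luaotfload.conf is a placeholder.

    luaotfload-tool --dumpconf > my-luaotfload.conf
    luaotfload-tool --conf=my-luaotfload.conf --update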
diff --git a/Master/texmf-dist/doc/man/man1/luaotfload-tool.man1.pdf b/Master/texmf-dist/doc/man/man1/luaotfload-tool.man1.pdf
index 43bc81ffc51..8358d301ab9 100644
--- a/Master/texmf-dist/doc/man/man1/luaotfload-tool.man1.pdf
+++ b/Master/texmf-dist/doc/man/man1/luaotfload-tool.man1.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/man/man5/luaotfload.conf.5 b/Master/texmf-dist/doc/man/man5/luaotfload.conf.5
index 68d40eba01f..89c8fbb5f46 100644
--- a/Master/texmf-dist/doc/man/man5/luaotfload.conf.5
+++ b/Master/texmf-dist/doc/man/man5/luaotfload.conf.5
@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
-.TH LUAOTFLOAD.CONF 5 "2014-06-09" "2.5" "text processing"
+.TH LUAOTFLOAD.CONF 5 "2015-12-09" "2.6" "text processing"
.SH NAME
luaotfload.conf \- Luaotfload configuration file
.
To observe the difference in behavior, save the above snippet to
.sp
.nf
.ft C
-luaotfload \-\-update \-\-force
+luaotfload\-tool \-\-update \-\-force
.ft P
.fi
.UNINDENT
.UNINDENT
+.sp
+The current configuration can be written to disk using
+\fBluaotfload\-tool\fP:
+.INDENT 0.0
+.INDENT 3.5
+.sp
+.nf
+.ft C
+luaotfload\-tool \-\-dumpconf > luaotfload.conf
+.ft P
+.fi
+.UNINDENT
+.UNINDENT
+.sp
+The result can itself be used as a configuration file.
.SH SYNTAX
.sp
The configuration file syntax follows the common INI format. For a more
@@ -139,8 +154,9 @@ letters as well as dashes (\fB\-\fP).
Variables belong in a configuration section and their values must
be of a certain type. Some of them have further constraints. For
example, the “color callback” must be a string of one of the values
-\fBpre_linebreak_filter\fP or \fBpre_output_filter\fP, defined in the
-section \fIrun\fP\&.
+\fBpost_linebreak_filter\fP, \fBpre_linebreak_filter\fP, or
+\fBpre_output_filter\fP, defined in the section \fIrun\fP of the
+configuration file.
.sp
Currently, the configuration is organized into four sections:
.INDENT 0.0
@@ -433,7 +449,7 @@ color\-callback
T} T{
s
T} T{
-\fB"pre_linebreak_filter"\fP
+\fB"post_linebreak_filter"\fP
T}
_
T{
@@ -460,14 +476,26 @@ T} T{
\fB"cached"\fP
T}
_
+T{
+fontloader
+T} T{
+s
+T} T{
+\fB"default"\fP
+T}
+_
.TE
.sp
The \fBcolor\-callback\fP option determines the stage at which fonts that
are defined with a \fBcolor=xxyyzz\fP feature will be colorized. By default
-this happens in a \fBpre_linebreak_filter\fP but alternatively the
-\fBpre_output_filter\fP may be chosen, which is faster but might produce
-inconsistent output. The latter also was the default in the 1.x series
-of Luaotfload.
+this happens in a \fBpost_linebreak_filter\fP, but alternatively the
+\fBpre_linebreak_filter\fP or \fBpre_output_filter\fP may be chosen; the
+latter is faster but might produce inconsistent output. The
+\fBpre_output_filter\fP used to be the default in the 1.x series of
+Luaotfload, whilst later versions up to and including 2.5 hooked into
+the \fBpre_linebreak_filter\fP, which naturally did not affect any glyphs
+inserted during hyphenation. Both are kept around as options to
+restore the previous behavior if necessary.
.sp
The \fBdefiner\fP allows for switching the \fBdefine_font\fP callback.
Apart from the default \fBpatch\fP, one may also choose the \fBgeneric\fP
@@ -475,6 +503,31 @@ one that comes with the vanilla fontloader. Beware that this might
break tools like Fontspec that rely on the \fBpatch_font\fP callback
provided by Luaotfload to perform important corrections on font data.
.sp
+The fontloader backend can be selected by setting the value of
+\fBfontloader\fP\&. The most important choices are \fBdefault\fP, which will
+load the dedicated Luaotfload fontloader, and \fBreference\fP, the
+upstream package as shipped with Luaotfload. Other than those, a file
+name accessible via kpathsea can be specified.
+.sp
+Alternatively, the individual files that constitute the fontloader can
+be loaded directly. While less efficient, this greatly aids debugging
+since error messages will reference the actual line numbers of the
+source files and explanatory comments are not stripped. Currently,
+three distinct loading strategies are available: \fBunpackaged\fP will
+load the batch that is part of Luaotfload. These contain the identical
+source code that the reference fontloader has been compiled from.
+Another option, \fBcontext\fP will attempt to load the same files by
+their names in the Context format from the search path. Consequently
+this option allows to use the version of Context that comes with the
+TeX distribution. Distros tend to prefer the stable version (“current”
+in Context jargon) of those files so certain bugs encountered in the
+more bleeding edge Luaotfload can be avoided this way. A third option
+is to use \fBcontext\fP with a colon to specify a directory prefix where
+the \fITEXMF\fP is located that the files should be loaded from, e. g.
+\fBcontext:~/context/tex/texmf\-context\fP\&. This can be used when
+referencing another distribution like the Context minimals that is
+installed under a different path not indexed by kpathsea.
+.sp
The value of \fBlog\-level\fP sets the default verbosity of messages
printed by Luaotfload. Only messages defined with a verbosity of less
than or equal to the supplied value will be output on the terminal.
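
Putting the above together, a minimal configuration sketch (illustrative only; key names and defaults as given in the table and text of this man page, with the section assignment following the statement that the color callback lives in the run section):

    [run]
      color-callback = post_linebreak_filter
      definer        = patch
      fontloader     = default

According to the text, color-callback may instead be set to pre_linebreak_filter or pre_output_filter, definer to generic, and fontloader to reference, unpackaged, context, context:<prefix>, or the name of a file reachable through kpathsea.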
diff --git a/Master/texmf-dist/doc/man/man5/luaotfload.conf.man5.pdf b/Master/texmf-dist/doc/man/man5/luaotfload.conf.man5.pdf
deleted file mode 100644
index ed914cc3db1..00000000000
--- a/Master/texmf-dist/doc/man/man5/luaotfload.conf.man5.pdf
+++ /dev/null
Binary files differ
diff --git a/Master/texmf-dist/scripts/luaotfload/luaotfload-tool.lua b/Master/texmf-dist/scripts/luaotfload/luaotfload-tool.lua
index 8a65298d03a..69b6c976fb1 100755
--- a/Master/texmf-dist/scripts/luaotfload/luaotfload-tool.lua
+++ b/Master/texmf-dist/scripts/luaotfload/luaotfload-tool.lua
@@ -2,15 +2,13 @@
-----------------------------------------------------------------------
-- FILE: luaotfload-tool.lua
-- DESCRIPTION: database functionality
--- REQUIREMENTS: luaotfload 2.5
+-- REQUIREMENTS: luaotfload 2.6
-- AUTHOR: Khaled Hosny, Élie Roux, Philipp Gesang
--- VERSION: 2.5
-- LICENSE: GPL v2.0
--- MODIFIED: 2014-08-10 23:13:18+0200
-----------------------------------------------------------------------
luaotfload = luaotfload or { }
-local version = "2.5-4" --- <int: major>.<int: minor>-<int: fixes>
+local version = "2.6"
luaotfload.version = version
luaotfload.self = "luaotfload-tool"
@@ -76,21 +74,13 @@ else -- 5.2
runtime = { "stock", _VERSION }
end
-
local C, Ct, P, S = lpeg.C, lpeg.Ct, lpeg.P, lpeg.S
local lpegmatch = lpeg.match
-local loader_file = "luatexbase.loader.lua"
-local loader_path = assert(kpsefind_file(loader_file, "lua"),
- "File '"..loader_file.."' not found")
-
-
string.quoted = string.quoted or function (str)
return string.format("%q",str)
end
-require (loader_path)
-
--[[doc--
XXX:
@@ -109,6 +99,7 @@ config.lualibs.prefer_merged = true
config.lualibs.load_extended = true
require "lualibs"
+
local iosavedata = io.savedata
local lfsisdir = lfs.isdir
local lfsisfile = lfs.isfile
@@ -138,22 +129,69 @@ local backup = {
}
texio.write, texio.write_nl = dummy_function, dummy_function
-require"luaotfload-basics-gen.lua"
+require "fontloader-basics-gen.lua"
texio.write, texio.write_nl = backup.write, backup.write_nl
utilities = backup.utilities
-require "luaotfload-log.lua" --- this populates the luaotfload.log.* namespace
-require "luaotfload-parsers" --- fonts.conf, configuration, and request syntax
-require "luaotfload-configuration" --- configuration file handling
-require "luaotfload-database"
+fonts = { names = { } } -- for db; normally provided by the fontloaders
+
+local require_init = { }
+
+local loadmodule = function (name)
+ local v = require ("luaotfload-" .. name)
+ if v then
+ local mod = { }
+ local tv = type (v)
+ if tv == "table" then
+ mod.name = name
+ mod.init = v.init
+ require_init [#require_init + 1] = mod
+ elseif tv == "function" then
+ mod.name = name
+ mod.init = v
+ require_init [#require_init + 1] = mod
+ end
+ end
+end
+
require "alt_getopt"
-local names = fonts.names
-local sanitize_fontname = names.sanitize_fontname
+loadmodule "log.lua" --- this populates the luaotfload.log.* namespace
+loadmodule "parsers" --- fonts.conf, configuration, and request syntax
+loadmodule "configuration" --- configuration file handling
+loadmodule "database"
+loadmodule "resolvers" --- Font lookup
+
+local logreport
+
+local init_modules = function ()
+ --- NB we don’t command the logger at this point.
+ local todo = #require_init
+ local ret = true
+ for i = 1, todo do
+ local mod = require_init[i]
+ local name = mod.name
+ local init = mod.init
+ if type (init) ~= "function" then
+ error ("luaotfload broken; module "
+ .. name .. " missing initializers!")
+ end
+ local v = mod.init ()
+ if v == true then
+ --- evaluated well
+ elseif type (v) == "table" then
+ luaotfload[name] = v
+ else
+ error ("luaotfload broken; initialization of module "
+ .. name .. " returned " .. tostring (v) .. ".")
+ return false
+ end
+ end
+ logreport = luaotfload.log.report
+ return ret
+end
-local log = luaotfload.log
-local report = log.report
local help_messages = {
["luaotfload-tool"] = [[
@@ -170,8 +208,7 @@ Usage: %s [OPTIONS...]
-q --quiet don't output anything
-v --verbose=LEVEL be more verbose (print the searched directories)
- -vv print the loaded fonts
- -vvv print all steps of directory searching
+ -v, -vv .. -vvvvvvvvv set loglevel in unary
--log=stdout redirect log output to stdout
-V --version print version and exit
@@ -191,7 +228,7 @@ Usage: %s [OPTIONS...]
-c --no-compress do not gzip index file (text version only)
-l --flush-lookups empty lookup cache of font requests
-D --dry-run skip loading of fonts, just scan
- --formats=[+|-]EXTENSIONS set, add, or subtract formats to index
+ --formats=[+|-]EXTENSIONS set, add, or subtract file formats
-p --prefer-texmf prefer fonts in the TEXMF over system fonts
--max-fonts=N process at most N font files
@@ -259,7 +296,7 @@ local help_msg = function (version)
luaotfload.self,
names_gzip,
names_bin,
- caches.getwritablepath (config.luaotfload.cache_dir)))
+ caches.getwritablepath (config.luaotfload.paths.cache_dir, "")))
end
local about = [[
@@ -272,7 +309,7 @@ local about = [[
local version_msg = function ( )
local out = function (...) texiowrite_nl (stringformat (...)) end
local uname = os.uname ()
- local meta = names.getmetadata ()
+ local meta = fonts.names.getmetadata ()
out (about, luaotfload.self)
out ("%s version: %q", luaotfload.self, version)
out ("Revision: %q", config.luaotfload.status.notes.revision)
@@ -288,10 +325,14 @@ local version_msg = function ( )
local var = uname_vars[i]
out (" + %8s: %s", var, uname[var])
end
- out ("Index: version=%q created=%q modified=%q",
- config.luaotfload.status.notes.revision,
- meta.created or "ages ago",
- meta.modified or "ages ago")
+ if meta == false then
+ out("No database metadata available.")
+ else
+ out ("Index: version=%q created=%q modified=%q",
+ config.luaotfload.status.notes.revision,
+ meta.created or "ages ago",
+ meta.modified or "ages ago")
+ end
out ""
end
@@ -669,7 +710,7 @@ subfont_by_name = function (lst, askedname, n)
local font = lst[n]
if font then
- if sanitize_fontname (font.fullname) == askedname then
+ if fonts.names.sanitize_fontname (font.fullname) == askedname then
return font
end
return subfont_by_name (lst, askedname, n+1)
@@ -686,10 +727,10 @@ The font info knows two levels of detail:
--doc]]--
local show_font_info = function (basename, askedname, detail, warnings)
- local filenames = names.data().files
+ local filenames = fonts.names.data().files
local index = filenames.base[basename]
local fullname = filenames.full[index]
- askedname = sanitize_fontname (askedname)
+ askedname = fonts.names.sanitize_fontname (askedname)
if not fullname then -- texmf
fullname = resolvers.findfile(basename)
end
@@ -699,9 +740,9 @@ local show_font_info = function (basename, askedname, detail, warnings)
if nfonts > 0 then -- true type collection
local subfont
if askedname then
- report (true, 1, "resolve",
- [[%s is part of the font collection %s]],
- askedname, basename)
+ logreport (true, 1, "resolve",
+ [[%s is part of the font collection %s]],
+ askedname, basename)
subfont = subfont_by_name(shortinfo, askedname)
end
if subfont then
@@ -710,11 +751,11 @@ local show_font_info = function (basename, askedname, detail, warnings)
show_full_info(fullname, subfont, warnings)
end
else -- list all subfonts
- report (true, 1, "resolve",
- [[%s is a font collection]], basename)
+ logreport (true, 1, "resolve",
+ [[%s is a font collection]], basename)
for subfont = 1, nfonts do
- report (true, 1, "resolve",
- [[Showing info for font no. %d]], n)
+ logreport (true, 1, "resolve",
+ [[Showing info for font no. %d]], n)
show_info_items(shortinfo[subfont])
if detail == true then
show_full_info(fullname, subfont, warnings)
@@ -728,7 +769,7 @@ local show_font_info = function (basename, askedname, detail, warnings)
end
end
else
- report (true, 1, "resolve", "Font %s not found", filename)
+ logreport (true, 1, "resolve", "Font %s not found", filename)
end
end
@@ -739,9 +780,10 @@ set.
--]]--
local action_sequence = {
- "config", "loglevel", "help", "version",
- "diagnose", "blacklist", "cache", "flush",
- "bisect", "generate", "list", "query",
+ "config" , "loglevel" , "help" , "version" ,
+ "dumpconf" , "diagnose" , "blacklist" , "cache" ,
+ "flush" , "bisect" , "generate" , "list" ,
+ "query" ,
}
local action_pending = tabletohash(action_sequence, false)
@@ -755,9 +797,9 @@ local actions = { } --- (jobspec -> (bool * bool)) list
actions.loglevel = function (job)
local lvl = job.log_level
if lvl then
- log.set_loglevel(lvl)
- report ("info", 3, "util", "Setting the log level to %d.", lvl)
- report ("log", 2, "util", "Lua=%q", _VERSION)
+ luaotfload.log.set_loglevel(lvl)
+ logreport ("info", 3, "util", "Setting the log level to %d.", lvl)
+ logreport ("log", 2, "util", "Lua=%q", _VERSION)
end
return true, true
end
@@ -781,25 +823,32 @@ actions.version = function (job)
return true, false
end
+actions.dumpconf = function (job)
+ config.actions.dump ()
+ return true, false
+end
+
actions.help = function (job)
help_msg (job.help_version or "luaotfload-tool")
return true, false
end
actions.blacklist = function (job)
- names.read_blacklist()
+ fonts.names.read_blacklist()
local n = 0
- for n, entry in next, tablesortedkeys(names.blacklist) do
+ for n, entry in next, tablesortedkeys(fonts.names.blacklist) do
iowrite (stringformat("(%d %s)\n", n, entry))
end
return true, false
end
actions.generate = function (job)
- local _ = names.update (fontnames, job.force_reload, job.dry_run)
- local namedata = names.data ()
+ local _ = fonts.names.update (fontnames, job.force_reload, job.dry_run)
+ local namedata = fonts.names.data ()
if namedata then
- report ("info", 2, "db", "Fonts in the database: %i", #namedata.mappings)
+ logreport ("info", 2, "db",
+ "Fonts in the database: %i",
+ #namedata.mappings)
return true, true
end
return false, false
@@ -809,7 +858,7 @@ end
--- bisect mode
-------------------------------------------------------------------------------
-local bisect_status_path = caches.getwritablepath "bisect"
+local bisect_status_path = caches.getwritablepath ("bisect", "")
local bisect_status_file = bisect_status_path .."/" .. "luaotfload-bisect-status.lua"
local bisect_status_fmt = [[
--[==[-------------------------------------------------------------------------
@@ -835,12 +884,14 @@ local write_bisect_status = function (data)
osdate ("%Y-%m-d %H:%M:%S", os.time ()),
payload)
if status and iosavedata (bisect_status_file, status) then
- report ("info", 4, "bisect",
- "Bisection state written to %s.", bisect_status_file)
+ logreport ("info", 4, "bisect",
+ "Bisection state written to %s.",
+ bisect_status_file)
return true
end
- report ("info", 0, "bisect",
- "Failed to write bisection state to %s.", bisect_status_file)
+ logreport ("info", 0, "bisect",
+ "Failed to write bisection state to %s.",
+ bisect_status_file)
return false
end
@@ -852,16 +903,22 @@ end
--- unit -> state list
local read_bisect_status = function ()
- report ("info", 4, "bisect", "Testing for status file: %q.", bisect_status_file)
+ logreport ("info", 4, "bisect",
+ "Testing for status file: %q.",
+ bisect_status_file)
if not lfsisfile (bisect_status_file) then
- report ("info", 2, "bisect", "No such file: %q.", bisect_status_file)
- report ("info", 0, "bisect", "Not in bisect mode.")
+ logreport ("info", 2, "bisect",
+ "No such file: %q.", bisect_status_file)
+ logreport ("info", 0, "bisect",
+ "Not in bisect mode.")
return false
end
- report ("info", 4, "bisect", "Reading status file: %q.", bisect_status_file)
+ logreport ("info", 4, "bisect",
+ "Reading status file: %q.", bisect_status_file)
local success, status = pcall (dofile, bisect_status_file)
if not success then
- report ("info", 0, "bisect", "Could not read status file.")
+ logreport ("info", 0, "bisect",
+ "Could not read status file.")
return false
end
return status
@@ -876,19 +933,21 @@ end
local bisect_start = function ()
if lfsisfile (bisect_status_file) then
- report ("info", 0, "bisect",
- "Bisect session in progress.",
- bisect_status_file)
- report ("info", 0, "bisect",
- "Use --bisect=stop to erase it before starting over.")
+ logreport ("info", 0, "bisect",
+ "Bisect session in progress.",
+ bisect_status_file)
+ logreport ("info", 0, "bisect",
+ "Use --bisect=stop to erase it before starting over.")
return false, false
end
- report ("info", 2, "bisect",
- "Starting bisection of font database %q.", bisect_status_file)
- local n = names.count_font_files ()
+ logreport ("info", 2, "bisect",
+ "Starting bisection of font database %q.",
+ bisect_status_file)
+ local n = fonts.names.count_font_files ()
local pivot = mathfloor (n / 2)
local data = { { 1, n, pivot } }
- report ("info", 0, "bisect", "Initializing pivot to %d.", pivot)
+ logreport ("info", 0, "bisect",
+ "Initializing pivot to %d.", pivot)
if write_bisect_status (data) then
return true, false
end
@@ -902,21 +961,23 @@ end
--doc]]--
local bisect_stop = function ()
- report ("info", 3, "bisect", "Erasing bisection state at %s.", bisect_status_file)
+ logreport ("info", 3, "bisect",
+ "Erasing bisection state at %s.",
+ bisect_status_file)
if lfsisfile (bisect_status_file) then
local success, msg = os.remove (bisect_status_file)
if not success then
- report ("info", 2, "bisect",
- "Failed to erase file %s (%s).",
- bisect_status_file, msg)
+ logreport ("info", 2, "bisect",
+ "Failed to erase file %s (%s).",
+ bisect_status_file, msg)
end
end
if lfsisdir (bisect_status_path) then
local success, msg = os.remove (bisect_status_path)
if not success then
- report ("info", 2, "bisect",
- "Failed to erase directory %s (%s).",
- bisect_status_path, msg)
+ logreport ("info", 2, "bisect",
+ "Failed to erase directory %s (%s).",
+ bisect_status_path, msg)
end
end
if lfsisfile (bisect_status_file) then
@@ -933,12 +994,12 @@ end
--doc]]--
local bisect_terminate = function (nsteps, culprit)
- report ("info", 1, "bisect",
- "Bisection completed after %d steps.", nsteps)
- report ("info", 0, "bisect",
- "Bad file: %s.", names.nth_font_filename (culprit))
- report ("info", 0, "bisect",
- "Run with --bisect=stop to finish bisection.")
+ logreport ("info", 1, "bisect",
+ "Bisection completed after %d steps.", nsteps)
+ logreport ("info", 0, "bisect",
+ "Bad file: %s.", fonts.names.nth_font_filename (culprit))
+ logreport ("info", 0, "bisect",
+ "Run with --bisect=stop to finish bisection.")
return true, false
end
@@ -949,10 +1010,10 @@ end
--doc]]--
local list_remainder = function (lo, hi)
- local fonts = names.font_slice (lo, hi)
- report ("info", 0, "bisect", "%d fonts left.", hi - lo + 1)
+ local fonts = fonts.names.font_slice (lo, hi)
+ logreport ("info", 0, "bisect", "%d fonts left.", hi - lo + 1)
for i = 1, #fonts do
- report ("info", 1, "bisect", " · %2d: %s", lo, fonts[i])
+ logreport ("info", 1, "bisect", " · %2d: %s", lo, fonts[i])
lo = lo + 1
end
end
@@ -985,8 +1046,9 @@ local bisect_set = function (outcome)
local lo, hi, pivot = unpack (previous)
- report ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
- nsteps, lo, hi, pivot)
+ logreport ("info", 3, "bisect",
+ "Previous step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, lo, hi, pivot)
if outcome == "bad" then
hi = pivot
@@ -997,9 +1059,9 @@ local bisect_set = function (outcome)
return bisect_terminate (nsteps, lo)
end
pivot = mathfloor ((lo + hi) / 2)
- report ("info", 0, "bisect",
- "Continuing with the lower segment: lo=%d, hi=%d, pivot=%d.",
- lo, hi, pivot)
+ logreport ("info", 0, "bisect",
+ "Continuing with the lower segment: lo=%d, hi=%d, pivot=%d.",
+ lo, hi, pivot)
elseif outcome == "good" then
lo = pivot + 1
if lo >= hi then --- complete
@@ -1009,11 +1071,12 @@ local bisect_set = function (outcome)
return bisect_terminate (nsteps, lo)
end
pivot = mathfloor ((lo + hi) / 2)
- report ("info", 0, "bisect",
- "Continuing with the upper segment: lo=%d, hi=%d, pivot=%d.",
- lo, hi, pivot)
+ logreport ("info", 0, "bisect",
+ "Continuing with the upper segment: lo=%d, hi=%d, pivot=%d.",
+ lo, hi, pivot)
else -- can’t happen
- report ("info", 0, "bisect", "What the hell?", lo, hi, pivot)
+ logreport ("info", 0, "bisect",
+ "What the hell?", lo, hi, pivot)
return false, false
end
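Taken together, the hunks above implement a plain binary search over the indexed font files. A minimal standalone sketch (illustration only, not part of the patch) of the update rule in bisect_set, with an assumed trace for 100 fonts:

    -- How lo/hi/pivot evolve per --bisect outcome (illustrative numbers only):
    --   start          : lo=1,  hi=100, pivot=50   -- test fonts 1..50
    --   --bisect=bad   : lo=1,  hi=50,  pivot=25   -- culprit among the tested files
    --   --bisect=good  : lo=26, hi=50,  pivot=38   -- culprit among the untested files
    local function next_step (outcome, lo, hi, pivot)
      if outcome == "bad" then hi = pivot else lo = pivot + 1 end
      if lo >= hi then return lo end                -- done; lo names the culprit
      return lo, hi, math.floor ((lo + hi) / 2)
    end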
@@ -1040,13 +1103,13 @@ local bisect_status = function ()
if nsteps > 1 then
for i = nsteps - 1, 1, -1 do
local step = status[i]
- report ("info", 2, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
- i, unpack (step))
+ logreport ("info", 2, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
+ i, unpack (step))
end
end
local current = status[nsteps]
- report ("info", 0, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
- nsteps, unpack (current))
+ logreport ("info", 0, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, unpack (current))
return true, false
end
@@ -1072,10 +1135,10 @@ local bisect_run = function ()
current = status[nsteps - 1]
end
local lo, hi, pivot = unpack (current)
- report ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
- nsteps, lo, hi, pivot)
- report ("info", 1, "bisect", "Step %d: Testing fonts from %d to %d.",
- currentstep, lo, pivot)
+ logreport ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, lo, hi, pivot)
+ logreport ("info", 1, "bisect", "Step %d: Testing fonts from %d to %d.",
+ currentstep, lo, pivot)
config.luaotfload.misc.bisect = { lo, pivot }
return true, true
end
@@ -1093,35 +1156,38 @@ actions.bisect = function (job)
local mode = job.bisect
local runner = bisect_modes[mode]
if not runner then
- report ("info", 0, "bisect", "Unknown directive %q.", mode)
+ logreport ("info", 0, "bisect", "Unknown directive %q.", mode)
return false, false
end
return runner (job)
end
actions.flush = function (job)
- local success = names.flush_lookup_cache()
+ local success = fonts.names.flush_lookup_cache()
if success then
- local success = names.save_lookups()
+ local success = fonts.names.save_lookups()
if success then
- report ("info", 2, "cache", "Lookup cache emptied")
+ logreport ("info", 2, "cache", "Lookup cache emptied")
return true, true
end
end
return false, false
end
-local cache_directives = {
- ["purge"] = names.purge_cache,
- ["erase"] = names.erase_cache,
- ["show"] = names.show_cache,
-}
+local cache_directives = function ()
+ --- These exist only after initialization.
+ return {
+ ["purge"] = fonts.names.purge_cache,
+ ["erase"] = fonts.names.erase_cache,
+ ["show"] = fonts.names.show_cache,
+ }
+end
actions.cache = function (job)
- local directive = cache_directives[job.cache]
+ local directive = cache_directives()[job.cache]
if not directive or type(directive) ~= "function" then
- report ("info", 2, "cache",
- "Invalid font cache directive %s.", job.cache)
+ logreport ("info", 2, "cache",
+ "Invalid font cache directive %s.", job.cache)
return false, false
end
if directive() then
@@ -1144,7 +1210,7 @@ actions.query = function (job)
features = { },
}
- tmpspec = names.handle_request (tmpspec)
+ tmpspec = fonts.names.handle_request (tmpspec)
if not tmpspec.size then
tmpspec.size = 655360 --- assume 10pt
@@ -1155,38 +1221,38 @@ actions.query = function (job)
if tmpspec.lookup == "name"
or tmpspec.lookup == "anon" --- not *exactly* as resolvers.anon
then
- foundname, subfont = names.resolve_name (tmpspec)
+ foundname, _, success = fonts.names.lookup_font_name (tmpspec)
if foundname then
- foundname, _, success = names.font_file_lookup (foundname)
+ foundname, _, success = fonts.names.lookup_font_file (foundname)
end
elseif tmpspec.lookup == "file" then
- foundname, _, success =
- names.font_file_lookup (tmpspec.name)
+ foundname, _, success = fonts.names.lookup_font_file (tmpspec.name)
end
if success then
- report (false, 0, "resolve", "Font %q found!", query)
+ logreport (false, 0, "resolve", "Font %q found!", query)
if subfont then
- report (false, 0, "resolve",
- "Resolved file name %q, subfont nr. %q",
- foundname, subfont)
+ logreport (false, 0, "resolve",
+ "Resolved file name %q, subfont nr. %q",
+ foundname, subfont)
else
- report (false, 0, "resolve",
- "Resolved file name %q", foundname)
+ logreport (false, 0, "resolve",
+ "Resolved file name %q", foundname)
end
if job.show_info then
show_font_info (foundname, query, job.full_info, job.warnings)
iowrite "\n"
end
else
- report (false, 0, "resolve", "Cannot find %q in index.", query)
- report (false, 0, "resolve",
- "Hint: use the --fuzzy option to display suggestions.",
- query)
+ logreport (false, 0, "resolve", "Cannot find %q in index.", query)
if job.fuzzy == true then
- report (false, 0, "resolve",
- "Looking for close matches, this may take a while ...")
- local _success = names.find_closest(query, job.fuzzy_limit)
+ logreport (false, 0, "resolve",
+ "Looking for close matches, this may take a while ...")
+ local _success = fonts.names.find_closest(query, job.fuzzy_limit)
+ else
+ logreport (false, 0, "resolve",
+ "Hint: use the --fuzzy option to display suggestions.",
+ query)
end
end
return true, true
@@ -1259,14 +1325,13 @@ set_primary_field = function (fields, addme, acc, n)
return acc
end
-local splitcomma = luaotfload.parsers.splitcomma
-
actions.list = function (job)
local criterion = job.criterion
local asked_fields = job.asked_fields
- local name_index = names.data ()
+ local name_index = fonts.names.data ()
if asked_fields then
+ local splitcomma = luaotfload.parsers.splitcomma
asked_fields = lpegmatch(splitcomma, asked_fields)
end
@@ -1276,14 +1341,14 @@ actions.list = function (job)
end
if not name_index then
- name_index = names.load()
+ name_index = fonts.names.load()
end
local mappings = name_index.mappings
local nmappings = #mappings
if criterion == "*" then
- report (false, 1, "list", "All %d entries", nmappings)
+ logreport (false, 1, "list", "All %d entries", nmappings)
for i=1, nmappings do
local entry = mappings[i]
local fields = get_fields(entry, asked_fields)
@@ -1298,12 +1363,12 @@ actions.list = function (job)
criterion = criterion[1]
asked_fields = set_primary_field(asked_fields, criterion)
- report (false, 1, "list", "By %s", criterion)
+ logreport (false, 1, "list", "By %s", criterion)
--- firstly, build a list of fonts to operate on
local targets = { }
if asked_value then --- only those whose value matches
- report (false, 2, "list", "Restricting to value %s", asked_value)
+ logreport (false, 2, "list", "Restricting to value %s", asked_value)
for i=1, nmappings do
local entry = mappings[i]
if entry[criterion]
@@ -1348,7 +1413,7 @@ actions.list = function (job)
end
end
local ntargets = #targets
- report (false, 2, "list", "%d entries", ntargets)
+ logreport (false, 2, "list", "%d entries", ntargets)
--- now, output the collection
for i=1, ntargets do
@@ -1413,6 +1478,7 @@ local process_cmdline = function ( ) -- unit -> jobspec
cache = 1,
conf = 1,
diagnose = 1,
+ dumpconf = 0,
["dry-run"] = "D",
["flush-lookups"] = "l",
fields = 1,
@@ -1484,7 +1550,7 @@ local process_cmdline = function ( ) -- unit -> jobspec
elseif v == "log" then
local str = optarg[n]
if str then
- finalizers = log.set_logout(str, finalizers)
+ finalizers = luaotfload.log.set_logout(str, finalizers)
end
elseif v == "find" then
action_pending["query"] = true
@@ -1517,7 +1583,7 @@ local process_cmdline = function ( ) -- unit -> jobspec
elseif v == "D" then
result.dry_run = true
elseif v == "p" then
- names.set_location_precedence {
+ fonts.names.set_location_precedence {
"local", "texmf", "system"
}
elseif v == "b" then
@@ -1551,15 +1617,20 @@ local process_cmdline = function ( ) -- unit -> jobspec
result.bisect = optarg[n]
action_pending.bisect = true
elseif v == "conf" then
- local extra = stringexplode (optarg[n], ",+")
- if extra then
- local extra_config = result.extra_config
- if extra_config then
- table.append (extra_config, extra)
- else
- result.extra_config = extra
+ local confname = optarg[n]
+ if confname then
+ local extra = stringexplode (optarg[n], ",+")
+ if extra then
+ local extra_config = result.extra_config
+ if extra_config then
+ table.append (extra_config, extra)
+ else
+ result.extra_config = extra
+ end
end
end
+ elseif v == "dumpconf" then
+ action_pending["dumpconf"] = true
elseif v == "print-conf" then
result.print_config = true
end
@@ -1573,6 +1644,8 @@ local process_cmdline = function ( ) -- unit -> jobspec
end
local main = function ( ) -- unit -> int
+ if init_modules () == false then return -42 end
+
local retval = 0
local job = process_cmdline()
@@ -1583,23 +1656,23 @@ local main = function ( ) -- unit -> int
local actionname = action_sequence[i]
local exit = false
if action_pending[actionname] then
- report ("log", 3, "util", "Preparing for task", "%s", actionname)
+ logreport ("log", 3, "util", "Preparing for task", "%s", actionname)
local action = actions[actionname]
local success, continue = action(job)
if not success then
- report (false, 0, "util",
- "Failed to execute task.", "%s", actionname)
+ logreport (false, 0, "util",
+ "Failed to execute task.", "%s", actionname)
retval = -1
exit = true
elseif not continue then
- report (false, 3, "util",
- "Task completed, exiting.", "%s", actionname)
+ logreport (false, 3, "util",
+ "Task completed, exiting.", "%s", actionname)
exit = true
else
- report (false, 3, "util",
- "Task completed successfully.", "%s", actionname)
+ logreport (false, 3, "util",
+ "Task completed successfully.", "%s", actionname)
end
end
if exit then break end
diff --git a/Master/texmf-dist/scripts/luaotfload/mkcharacters b/Master/texmf-dist/scripts/luaotfload/mkcharacters
index abed2c936ba..59582f211cb 100755
--- a/Master/texmf-dist/scripts/luaotfload/mkcharacters
+++ b/Master/texmf-dist/scripts/luaotfload/mkcharacters
@@ -5,8 +5,6 @@
-- DESCRIPTION: import parts of char-def.lua
-- REQUIREMENTS: lua, ConTeXt, the lualibs package
-- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
--- VERSION: 2.5
--- MODIFIED: 2014-02-11 07:24:25+0100
-----------------------------------------------------------------------
-- we create a stripped-down version of char-def.lua
-----------------------------------------------------------------------
@@ -15,7 +13,7 @@
-- config
-----------------------------------------------------------------------
local charfile = "./build/luaotfload-characters.lua"
-local chardef = "/home/phg/base/char-def.lua"
+local chardef = arg[1]
--- for every code point char-def.lua provides a set of fields. they
--- are:
@@ -65,6 +63,11 @@ for _, lib in next, { "lualibs-lua.lua",
require(found)
end
+if not chardef then
+ chardef = kpse.expand_path("~/context/tex/texmf-context/tex/context/base/")
+ .. "/char-def.lua"
+end
+
if not (chardef and lfs.isfile(chardef)) then
--- we could grab the file from contextgarden but as Context is part
--- of TL it’s not worth bothering
@@ -72,6 +75,9 @@ if not (chardef and lfs.isfile(chardef)) then
"Could not find ConTeXt.")
end
+io.write(string.format("extracting data from char-def.lua at %s\n",
+ chardef))
+
-----------------------------------------------------------------------
-- functionality
-----------------------------------------------------------------------
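With the change above, mkcharacters takes the location of char-def.lua as its first argument and only falls back to the kpse-expanded ConTeXt tree when none is given. A hedged sketch of the resulting lookup (the invocation path is a placeholder):

    -- Assumed invocation:  texlua mkcharacters /path/to/char-def.lua
    kpse.set_program_name "luatex"
    local chardef = arg[1]
          or kpse.expand_path ("~/context/tex/texmf-context/tex/context/base/")
             .. "/char-def.lua"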
diff --git a/Master/texmf-dist/scripts/luaotfload/mkglyphlist b/Master/texmf-dist/scripts/luaotfload/mkglyphlist
index 8fde098f1a5..f66a686322b 100755
--- a/Master/texmf-dist/scripts/luaotfload/mkglyphlist
+++ b/Master/texmf-dist/scripts/luaotfload/mkglyphlist
@@ -5,8 +5,6 @@
-- DESCRIPTION: part of the luaotfload package
-- REQUIREMENTS: lua, lpeg, luasocket, the lualibs package
-- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
--- VERSION: 2.5
--- MODIFIED: 2014-02-11 06:44:50+0100
-----------------------------------------------------------------------
-- interesting thread on the Context list:
-- http://www.ntg.nl/pipermail/ntg-context/2008/029057.html
diff --git a/Master/texmf-dist/scripts/luaotfload/mkimport b/Master/texmf-dist/scripts/luaotfload/mkimport
new file mode 100755
index 00000000000..abe1608a60f
--- /dev/null
+++ b/Master/texmf-dist/scripts/luaotfload/mkimport
@@ -0,0 +1,871 @@
+#!/usr/bin/env texlua
+-------------------------------------------------------------------------------
+-- FILE: mkimport.lua
+-- USAGE: texlua ./mkimport.lua
+-- DESCRIPTION: check luaotfload imports against Context
+-- REQUIREMENTS: luatex, the lualibs package, Context MkIV
+-- AUTHOR: Philipp Gesang (Phg), <phg@phi-gamma.net>
+-------------------------------------------------------------------------------
+--
+
+-------------------------------------------------------------------------------
+--- PURPOSE
+---
+--- - Facilitate detecting changes in the fontloader source.
+--- - Assist in updating source code and (partially) automate importing.
+---
+--- - Account for files in the plain fontloader distribution, alert in case of
+--- additions or deletions.
+---
+--- - Fontloader packaging.
+---
+-------------------------------------------------------------------------------
+
+local debug = false
+
+kpse.set_program_name "luatex"
+
+local lfs = require "lfs"
+local md5 = require "md5"
+local os = require "os"
+
+require "lualibs"
+
+local filedirname = file.dirname
+local fileiswritable = file.is_writable
+local ioloaddata = io.loaddata
+local iopopen = io.popen
+local iowrite = io.write
+local lfschdir = lfs.chdir
+local lfscurrentdir = lfs.currentdir
+local lfsisdir = lfs.isdir
+local lfsisfile = lfs.isfile
+local md5sumhexa = md5.sumhexa
+local osdate = os.date
+local osgettimeofday = os.gettimeofday
+local osrename = os.rename
+local stringformat = string.format
+local tableconcat = table.concat
+
+-------------------------------------------------------------------------------
+-- config
+-------------------------------------------------------------------------------
+
+local parms = { }
+local our_prefix = "fontloader"
+local luatex_fonts_prefix = "luatex"
+local fontloader_subdir = "src/fontloader"
+
+local origin_paths = {
+ context = "tex/context/base",
+ fontloader = "tex/generic/context/luatex",
+}
+
+local subdirs = {
+ "runtime",
+ "misc"
+}
+
+local searchdirs = {
+ --- order is important!
+ fontloader_subdir,
+}
+
+local prefixes = {
+ context = nil,
+ fontloader = "luatex",
+}
+
+--[[doc--
+
+ The output name is fixed so we have to deal with it but maybe we
+ can get a patch to mtx-package upstreamed in the future. In any
+ case, we are content with renaming the result for the time being.
+
+ The target name is constructed on the fly from the current date.
+ TODO It should be possible to supply a name and possibly
+ destination path on the command line.
+
+ Paths are relative to the base directory (``$PWD``).
+
+--doc]]--
+
+local loader_merge_name = "luaotfload-package.lua"
+local loader_output_name = "luaotfload-package-merged.lua"
+local loader_target_name = "fontloader-%s.lua"
+local loader_orig_dir = "/src/fontloader/"
+local loader_target_dir = "/build/"
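A one-line illustration (not part of the file) of how build_paths further below instantiates this template:

    -- loader_target_name with the ISO date filled in, written under loader_target_dir:
    local target = string.format ("fontloader-%s.lua", os.date ("%F"))
    -- e.g. "fontloader-2015-12-09.lua"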
+
+-------------------------------------------------------------------------------
+-- helpers
+-------------------------------------------------------------------------------
+
+local die = function (...)
+ io.stderr:write "[\x1b[1;30;41mfatal error\x1b[0m]: "
+ io.stderr:write (stringformat (...))
+ io.stderr:write "\naborting.\n"
+ os.exit (1)
+end
+
+local emphasis = function (txt)
+ return stringformat("\x1b[1m%s\x1b[0m", txt)
+end
+
+local msg = function (...)
+ iowrite (stringformat (...))
+ iowrite "\n"
+end
+
+local separator_string = string.rep ("-", 79)
+local separator = function ()
+ iowrite (separator_string)
+ iowrite "\n"
+end
+
+local good_tag = stringformat("[\x1b[1;30;%dmgood\x1b[0m] · ", 42)
+local bad_tag = stringformat("[\x1b[1;30;%dmBAD\x1b[0m] · ", 41)
+local alert_tag = stringformat("[\x1b[1;%dmalert\x1b[0m] · " , 36)
+local status_tag = stringformat("[\x1b[0;%dmstatus\x1b[0m] · " , 36)
+
+local good = function (...)
+ local msg = (stringformat (...))
+ iowrite (good_tag)
+ iowrite (msg)
+ iowrite "\n"
+end
+
+local bad = function (...)
+ local msg = (stringformat (...))
+ iowrite (bad_tag)
+ iowrite (msg)
+ iowrite "\n"
+end
+
+local attention = function (...)
+ local msg = (stringformat (...))
+ iowrite (alert_tag)
+ iowrite (msg)
+ iowrite "\n"
+end
+
+local status = function (...)
+ local msg = (stringformat (...))
+ iowrite (status_tag)
+ iowrite (msg)
+ iowrite "\n"
+end
+
+-------------------------------------------------------------------------------
+-- definitions
+-------------------------------------------------------------------------------
+
+--- Accounting of upstream files. There are different categories:
+---
+--- · *essential*: Files required at runtime.
+--- · *merged*: Files merged into the fontloader package.
+--- · *ignored*: Lua files not merged, but part of the format.
+--- · *tex*: TeX code, i.e. format and examples.
+--- · *lualibs*: Files merged, but also provided by the Lualibs package.
+
+local kind_essential = 0
+local kind_merged = 1
+local kind_tex = 2
+local kind_ignored = 3
+local kind_lualibs = 4
+
+local kind_name = {
+ [0] = "essential",
+ [1] = "merged" ,
+ [2] = "tex" ,
+ [3] = "ignored" ,
+ [4] = "lualibs" ,
+}
+
+local imports = {
+
+ fontloader = {
+ { name = "basics-gen" , ours = nil , kind = kind_essential },
+ { name = "basics-nod" , ours = nil , kind = kind_merged },
+ { name = "basics" , ours = nil , kind = kind_tex },
+ { name = "fonts-cbk" , ours = nil , kind = kind_merged },
+ { name = "fonts-def" , ours = nil , kind = kind_merged },
+ { name = "fonts-demo-vf-1" , ours = nil , kind = kind_ignored },
+ { name = "fonts-enc" , ours = nil , kind = kind_merged },
+ { name = "fonts-ext" , ours = nil , kind = kind_merged },
+ { name = "fonts-inj" , ours = nil , kind = kind_merged },
+ { name = "fonts-lua" , ours = nil , kind = kind_merged },
+ { name = "fonts-merged" , ours = "reference" , kind = kind_essential },
+ { name = "fonts-ota" , ours = nil , kind = kind_merged },
+ { name = "fonts-otn" , ours = nil , kind = kind_merged },
+ { name = "fonts" , ours = nil , kind = kind_merged },
+ { name = "fonts" , ours = nil , kind = kind_tex },
+ { name = "fonts-syn" , ours = nil , kind = kind_ignored },
+ { name = "fonts-tfm" , ours = nil , kind = kind_merged },
+ { name = "languages" , ours = nil , kind = kind_ignored },
+ { name = "languages" , ours = nil , kind = kind_tex },
+ { name = "math" , ours = nil , kind = kind_ignored },
+ { name = "math" , ours = nil , kind = kind_tex },
+ { name = "mplib" , ours = nil , kind = kind_ignored },
+ { name = "mplib" , ours = nil , kind = kind_tex },
+ { name = "plain" , ours = nil , kind = kind_tex },
+ { name = "preprocessor" , ours = nil , kind = kind_ignored },
+ { name = "preprocessor" , ours = nil , kind = kind_tex },
+ { name = "preprocessor-test" , ours = nil , kind = kind_tex },
+ { name = "swiglib" , ours = nil , kind = kind_ignored },
+ { name = "swiglib" , ours = nil , kind = kind_tex },
+ { name = "swiglib-test" , ours = nil , kind = kind_ignored },
+ { name = "swiglib-test" , ours = nil , kind = kind_tex },
+ { name = "test" , ours = nil , kind = kind_tex },
+ }, --[[ [fontloader] ]]
+
+ context = { --=> all merged
+ { name = "data-con" , ours = "data-con" , kind = kind_merged },
+ { name = "font-afk" , ours = "font-afk" , kind = kind_merged },
+ { name = "font-afm" , ours = "font-afm" , kind = kind_merged },
+ { name = "font-cid" , ours = "font-cid" , kind = kind_merged },
+ { name = "font-con" , ours = "font-con" , kind = kind_merged },
+ { name = "font-def" , ours = "font-def" , kind = kind_merged },
+ { name = "font-ini" , ours = "font-ini" , kind = kind_merged },
+ { name = "font-map" , ours = "font-map" , kind = kind_merged },
+ { name = "font-otb" , ours = "font-otb" , kind = kind_merged },
+ { name = "font-otf" , ours = "font-otf" , kind = kind_merged },
+ { name = "font-oti" , ours = "font-oti" , kind = kind_merged },
+ { name = "font-otp" , ours = "font-otp" , kind = kind_merged },
+ { name = "font-tfm" , ours = "font-tfm" , kind = kind_merged },
+ { name = "l-boolean" , ours = "l-boolean" , kind = kind_lualibs },
+ { name = "l-file" , ours = "l-file" , kind = kind_lualibs },
+ { name = "l-function" , ours = "l-function" , kind = kind_lualibs },
+ { name = "l-io" , ours = "l-io" , kind = kind_lualibs },
+ { name = "l-lpeg" , ours = "l-lpeg" , kind = kind_lualibs },
+ { name = "l-lua" , ours = "l-lua" , kind = kind_lualibs },
+ { name = "l-math" , ours = "l-math" , kind = kind_lualibs },
+ { name = "l-string" , ours = "l-string" , kind = kind_lualibs },
+ { name = "l-table" , ours = "l-table" , kind = kind_lualibs },
+ { name = "util-str" , ours = "util-str" , kind = kind_lualibs },
+ }, --[[ [context] ]]
+} --[[ [imports] ]]
+
+local hash_file = function (fname)
+ if not lfsisfile (fname) then
+ die ("cannot find %s.", fname)
+ end
+ local raw = ioloaddata (fname)
+ if not raw then
+ die ("cannot read from %s.", fname)
+ end
+ return md5sumhexa (raw)
+end
+
+local derive_category_path = function (cat)
+ local subpath = origin_paths[cat] or die ("category " .. cat .. " unknown")
+ local location = file.join (parms.context_root, subpath)
+ if not lfsisdir (location) then
+ die ("invalid base path defined for category "
+ .. cat .. " at " .. location)
+ end
+ return location
+end
+
+local derive_suffix = function (kind)
+ if kind == kind_tex then return ".tex" end
+ return ".lua"
+end
+
+local pfxlen = { }
+local strip_prefix = function (fname, prefix)
+ prefix = prefix or our_prefix
+ if not pfxlen[prefix] then pfxlen[prefix] = #prefix end
+ local len = pfxlen[prefix]
+ if #fname <= len + 2 then
+    --- too short to accommodate prefix + basename
+ return
+ end
+ if string.sub (fname, 1, len) == prefix then
+ return string.sub (fname, len + 2)
+ end
+end
+
+local derive_fullname = function (cat, name, kind)
+ local tmp = prefixes[cat]
+ tmp = tmp and tmp .. "-" .. name or name
+ return tmp .. derive_suffix (kind)
+end
+
+local derive_ourname = function (name, kind)
+ local suffix = derive_suffix (kind)
+ local subdir = kind == kind_essential and "runtime" or "misc"
+ return subdir, our_prefix .. "-" .. name .. suffix
+end
+
+local format_file_definition = function (def)
+ return stringformat ("name = \"%s\", kind = \"%s\"",
+ def.name,
+ kind_name[def.kind] or def.kind)
+ .. (def.ours and (", ours = \"" .. def.ours .. "\"") or "")
+end
+
+local is_readable = function (f)
+ local fh = io.open (f, "r")
+ if fh then
+ fh:close()
+ return true
+ end
+ return false
+end
+
+local summarize_news = function (status)
+ local ni = #status.import
+ local nc = #status.create
+ local ng = #status.good
+ local nm = #status.missing
+
+ separator ()
+ msg ("Summary: Inspected %d files.", ni + nc + ng + nm)
+ separator ()
+ if ng > 0 then good ("%d are up to date", ng) end
+ if ni > 0 then attention ("%d changed" , ni) end
+ if nc > 0 then attention ("%d new" , nc) end
+ if nm > 0 then bad ("%d missing" , nm) end
+ separator ()
+
+ if nm == 0 and nc == 0 and ni == 0 then
+ return 0
+ end
+
+ return -1
+end
+
+local news = function ()
+ local status = {
+ import = { },
+ good = { },
+ create = { },
+ missing = { },
+ }
+
+ for cat, entries in next, imports do
+ local location = derive_category_path (cat)
+ local nfiles = #entries
+
+ for i = 1, nfiles do
+ local def = entries[i]
+ local name = def.name
+ local ours = def.ours
+ local kind = def.kind
+ local fullname = derive_fullname (cat, name, kind)
+ local fullpath = file.join (location, fullname)
+ local subdir, ourname = derive_ourname (ours or name, kind)
+ local ourpath = file.join (fontloader_subdir, subdir, ourname) -- relative
+ local imported = false
+
+ if not is_readable (fullpath) then
+ bad ("source for file %s not found at %s",
+ emphasis (ourname),
+ emphasis (fullpath))
+ status.missing[#status.missing + 1] = ourname
+ else
+ --- Source file exists and is readable.
+ if not lfsisdir (fontloader_subdir) then
+ die ("path for fontloader tree ("
+ .. fontloader_subdir .. ") is not a directory")
+ end
+ if is_readable (ourpath) then imported = true end
+ local src_hash = hash_file (fullpath)
+ local dst_hash = imported and hash_file (ourpath)
+ local same = src_hash == dst_hash -- same!
+
+ if same then
+ good ("file %s unchanged", emphasis (ourname))
+ status.good[#status.good + 1] = ourname
+ elseif not dst_hash then
+ attention ("new file %s requires import from %s",
+ emphasis (ourname),
+ emphasis (fullpath))
+ status.create[#status.create + 1] = ourname
+ else --- src and dst exist but differ
+ attention ("file %s requires import", emphasis (ourname))
+ status.import[#status.import + 1] = ourname
+ end
+ end
+
+ end
+ end
+
+ return summarize_news (status)
+end --[[ [local news = function ()] ]]
+
+local get_file_definition = function (name, ourname, kind)
+ kind = kind or "lua"
+ for cat, defs in next, imports do
+ local fullname = derive_fullname (cat, name, kind)
+ local ndefs = #defs
+ for i = 1, ndefs do
+ local def = defs[i]
+ local dname = def.name
+ local dours = def.ours or def.name
+ local dkind = def.kind
+
+ --- test properties
+ local subdir, derived = derive_ourname (dours, dkind)
+ if derived == ourname then return def, cat end
+ if derive_fullname (cat, dname, dkind) == fullname then return def, cat end
+ if dours == ourname then return def, cat end
+ if dname == fullname then return def, cat end
+ end
+ end
+ --- search unsuccessful
+end --[[ [local get_file_definition = function (name, ourname, kind)] ]]
+
+local import_imported = 0
+local import_skipped = 1
+local import_failed = 2
+local import_created = 3
+
+local import_status = {
+ [import_imported] = "imported",
+ [import_skipped ] = "skipped",
+ [import_failed ] = "failed",
+ [import_created ] = "created",
+}
+
+local summarize_status = function (counters)
+ local imported = counters[import_imported] or 0
+ local skipped = counters[import_skipped ] or 0
+ local created = counters[import_created ] or 0
+ local failed = counters[import_failed ] or 0
+ local sum = imported + skipped + created + failed
+ if sum < 1 then die ("garbage total of imported files: %s", sum) end
+ separator ()
+ status (" RESULT: %d files processed", sum)
+ separator ()
+ if created > 0 then status ("created: %d (%d %%)", created , created * 100 / sum) end
+ if imported > 0 then status ("imported: %d (%d %%)", imported, imported * 100 / sum) end
+ if skipped > 0 then status ("skipped: %d (%d %%)", skipped , skipped * 100 / sum) end
+ separator ()
+end
+
+local import_file = function (name, kind, def, cat)
+ local expected_ourname = derive_ourname (name)
+ if not def or not cat then
+ def, cat = get_file_definition (name, expected_ourname, kind)
+ end
+
+ if not def then die ("unable to find a definition matching " .. name) end
+ if not cat then die ("missing category for file " .. name .. " -- WTF‽") end
+
+ local dname = def.name
+ local dours = def.ours or dname
+ local dkind = def.kind
+ local srcdir = derive_category_path (cat)
+ local fullname = derive_fullname (cat, dname, kind)
+ local subdir, ourname = derive_ourname (dours, kind)
+ local ourpath = file.join (fontloader_subdir, subdir)
+ local src = file.join (srcdir, fullname)
+ local dst = file.join (ourpath, ourname)
+ local new = not lfsisfile (dst)
+ if not new and hash_file (src) == hash_file (dst) then
+ status ("file %s is unchanged, skipping", fullname)
+ return import_skipped
+ end
+ if not (lfsisdir (ourpath) or not lfs.mkdirs (ourpath)) then
+ die ("failed to create directory %s for file %s",
+ ourpath, ourname)
+ end
+ status ("importing file %s", fullname)
+ file.copy (src, dst)
+ if hash_file (src) == hash_file (dst) then
+ if new then return import_created end
+ return import_imported end
+ return import_failed
+end --[[ [local import_file = function (name, kind)] ]]
+
+local import = function (arg)
+ --- Multiple files
+ local statcount = { } -- import status codes -> size_t
+ for cat, defs in next, imports do
+ local ndefs = #defs
+ for i = 1, ndefs do
+ local def = defs[i]
+ local stat = import_file (def.name, def.kind, def, cat)
+ if stat == import_failed then
+ die (stringformat ("import failed at file %d of %d (%s)",
+ i, ndefs, def.name))
+ end
+ statcount[stat] = statcount[stat] or 0
+ statcount[stat] = statcount[stat] + 1
+ end
+ end
+ summarize_status (statcount)
+ return 0
+end --[[ [local import = function (arg)] ]]
+
+local find_in_path = function (root, subdir, target)
+ local file = file.join (root, subdir, target)
+ if lfsisfile (file) then
+ return file
+ end
+end
+
+local search_paths = function (target)
+ for i = 1, #searchdirs do
+ local root = searchdirs[i]
+
+ for j = 1, #subdirs do
+ local found = find_in_path (root, subdirs[j], target)
+ if found then return found end
+ end
+
+ end
+
+ local found = find_in_path (parms.context_root, origin_paths.context, target)
+ if found then return found end
+
+ local found = find_in_path (parms.context_root, origin_paths.fontloader, target)
+ if found then return found end
+ return false
+end
+
+local search_defs = function (target)
+ local variants = { target, --[[ unstripped ]] }
+ local tmp
+ tmp = strip_prefix (target)
+ if tmp then variants[#variants + 1] = tmp end
+ tmp = strip_prefix (target, luatex_fonts_prefix)
+ if tmp then variants[#variants + 1] = tmp end
+
+ local nvariants = #variants
+
+ for cat, defs in next, imports do
+ local ndefs = #defs
+ for i = 1, ndefs do
+ local def = defs[i]
+
+ for i = 1, nvariants do
+ local variant = variants[i]
+
+ local dname = def.name
+ if variant == dname then
+ local found = search_paths (variant .. derive_suffix (def.kind))
+ if found then return found end
+ end
+
+ local dkind = def.kind
+ local dfull = derive_fullname (cat, dname, dkind)
+ if derive_fullname (cat, variant, dkind) == dfull then
+ local found = search_paths (dfull)
+ if found then return found end
+ end
+
+ local dours = def.ours
+ if dours then
+
+ local _, ourname = derive_ourname (dours, dkind)
+ if variant == dours then
+ local found = search_paths (ourname)
+ if found then return found end
+ end
+
+ if variant == ourname then
+ local found = search_paths (ourname)
+ if found then return found end
+ end
+ end
+
+ end
+ end
+ end
+ return false
+end
+
+local search = function (target)
+ local look_for
+ --- pick a file
+ if lfsisfile (target) then --- absolute path given
+ look_for = target
+ goto found
+ else
+
+ --- search as file name in local tree
+ look_for = search_paths (target)
+ if look_for then goto found end
+
+    --- search the definitions
+ look_for = search_defs (target)
+ if look_for then goto found end
+
+ end
+
+::fail::
+ if not look_for then return end
+
+::found::
+ return look_for
+end
+
+local find_matching_def = function (location)
+ local basename = file.basename (location)
+ if not basename then die ("corrupt path %s", location) end
+ local barename = file.removesuffix (basename)
+ local pfxless = strip_prefix (barename)
+ local kind = file.suffix (pfxless) or "lua"
+ for cat, defs in next, imports do
+ for _, def in next, defs do
+ local dname = def.name
+ local dours = def.ours
+ if dname == pfxless
+ or dname == barename
+ -- dname == basename -- can’t happen for lack of suffix
+ or dours == pfxless
+ or dours == barename
+ then
+ return cat, def
+ end
+ end
+ end
+ return false
+end
+
+local describe = function (target, location)
+ --- Map files to import definitions
+ separator ()
+ status ("found file %s at %s", target, location)
+ local cat, def = find_matching_def (location)
+ if not cat or not def then
+ die ("file %s not found in registry", location)
+ end
+
+ local dname = def.name
+ local dkind = def.kind
+ local subdir, ourname = derive_ourname (def.ours or dname, dkind)
+ separator ()
+ status ("category %s", cat)
+ status ("kind %s", kind_name[dkind])
+ status ("in Context %s", derive_fullname (cat, dname, dkind))
+ status ("in Luaotfload %s", ourname)
+ separator ()
+ return 0
+end
+
+local tell = function (arg)
+ local target = parms.target
+ if not target then die "no filename given" end
+
+ local location = search (target)
+ if not location then
+ die ("file %s not found in any of the search locations", target)
+ end
+
+ return describe (target, location)
+end
+
+local build_paths = function (argv)
+ if not argv or type (argv) ~= "table" then die "build_paths" end
+
+ local orig_dir = lfscurrentdir ()
+ local base_dir = orig_dir .. loader_orig_dir
+ local target_name = orig_dir .. loader_target_dir
+ .. stringformat (loader_target_name, os.date ("%F"))
+ local merge_name = base_dir .. loader_merge_name
+ local output_name = base_dir .. loader_output_name
+
+ if #argv >= 2 then
+ local fname = argv[2]
+ local dir = filedirname (fname) .. "/"
+ if not lfsisdir (dir) then
+ die ("second argument must be point into existing directory, not “%s”",
+ argv[2])
+ end
+ base_dir = dir
+ merge_name = fname
+ output_name = dir .. loader_output_name
+ end
+
+ if #argv == 3 then
+ --- also set the target name
+ local fname = argv[3]
+ local dir = filedirname (fname)
+ if not string.is_empty (dir) and not lfsisdir (dir) then
+ die ("third argument must be point into writable directory, not “%s”",
+ argv[3])
+ end
+ target_name = fname
+ end
+
+ local ret = {
+ orig_dir = orig_dir,
+ base_dir = base_dir,
+ merge_name = merge_name,
+ target_name = target_name,
+ output_name = output_name,
+ }
+ return ret
+end
+
+--[[doc--
+
+ Packaging works as follows:
+
+ * Files are looked up the usual way, allowing us to override the
+ distribution-supplied scripts with our own alternatives in the
+ local path.
+
+ * The merged package is written to the same directory as the
+ packaging script (not ``$PWD``).
+
+  There is some room for improvement: instead of reading a file with
+  fixed content from disk, the merge script could be composed
+  on the fly from a list of files and then written to memory (not sure,
+  though, whether we can access shm_open or memfd and the like from Lua).
+
+--doc]]--
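A condensed sketch of the steps package () below performs, assuming the current directory is the repository root (paths follow the constants defined earlier; the real code uses io.popen rather than os.execute):

    local merge  = "src/fontloader/luaotfload-package.lua"        -- loader_merge_name
    local merged = "src/fontloader/luaotfload-package-merged.lua" -- mtx-package output
    local target = "build/fontloader-" .. os.date ("%F") .. ".lua"
    os.execute ("mtxrun --script package --merge " .. merge)
    os.rename (merged, target)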
+
+local package = function (argv)
+ local t0 = osgettimeofday ()
+ local paths = build_paths (argv)
+
+ status ("assuming fontloader source in %s", paths.base_dir)
+ status ("reading merge instructions from %s", paths.merge_name)
+ status ("mtx-package result at %s", paths.output_name)
+ status ("writing output to %s", paths.target_name)
+
+ --- check preconditions
+
+ if not lfsisdir (paths.base_dir) then die ("directory %s does not exist", emphasis (paths.base_dir )) end
+ if not lfsisfile (paths.merge_name) then die ("missing merge file at %s", emphasis (paths.merge_name )) end
+ if not fileiswritable (paths.output_name) then die ("cannot write to %s", emphasis (paths.output_name)) end
+ if not fileiswritable (paths.target_name) then die ("cannot write to %s", emphasis (paths.target_name)) end
+---- not lfschdir (paths.base_dir) then die ("failed to cd into %s", emphasis (paths.base_dir )) end
+
+ if lfsisfile (paths.output_name) then
+ status ("output file already exists at “%s”, unlinking",
+ paths.output_name)
+ local ret, err = os.remove (paths.output_name)
+ if ret == nil then
+ if not lfschdir (paths.orig_dir) then
+ status ("warning: failed to cd retour into %s",
+ emphasis (paths.orig_dir))
+ end
+ die ("failed to remove existing merge package")
+ end
+ end
+ --die ("missing merge file at %s", emphasis (paths.merge_name )) end
+
+ --- perform merge
+
+ local cmd = { "mtxrun", "--script", "package", "--merge", paths.merge_name }
+ local shl = tableconcat (cmd, " ")
+
+ status ("invoking %s as “%s”", emphasis "mtx-package", shl)
+
+ local fh = iopopen (shl, "r")
+
+ if not fh then
+ if not lfschdir (paths.orig_dir) then
+ status ("warning: failed to cd retour into %s",
+ emphasis (paths.orig_dir))
+ end
+ die ("merge failed; failed to invoke mtxrun")
+ end
+
+ local junk = fh.read (fh, "*all")
+ if not junk then
+ status ("warning: received no output from mtxrun; this is strange")
+ end
+
+ fh.close (fh)
+
+ if debug then print (junk) end
+
+ --- clean up
+
+ if not lfschdir (paths.orig_dir) then
+ status ("warning: failed to cd retour into %s",
+ emphasis (paths.orig_dir))
+ end
+
+ --- check postconditions
+
+ if not lfsisfile (paths.output_name) then
+ die ("merge failed; package not found at " .. paths.output_name)
+ end
+
+ --- at this point we know that mtxrun was invoked correctly and the
+ --- result file has been created
+
+ if lfsisfile (paths.target_name) then
+ status ("target file %s exists, overwriting", emphasis (paths.target_name))
+ end
+
+ local res, err = osrename (paths.output_name, paths.target_name)
+
+ if res == nil then
+ die ("merge failed; failed to move package from %s to %s",
+ paths.output_name, paths.target_name)
+ end
+
+ status ("merge complete; operation finished in %.0f ms",
+ (osgettimeofday() - t0) * 1000)
+ status ("a fresh fontloader at %s is ready to roll", paths.target_name)
+end
+
+local help = function ()
+ iowrite "usage: mkimport <command> [<args>]\n"
+ iowrite "\n"
+ iowrite "Where <command> is one of\n"
+ iowrite " help Print this help message\n"
+ iowrite " tell Display information about a file’s integration\n"
+ iowrite " news Check Context for updated files\n"
+ iowrite " import Update with files from Context\n"
+ iowrite " package Invoke mtx-package on the current fontloader\n"
+ iowrite "\n"
+end
+
+local job_kind = table.mirrored {
+ help = help,
+ import = import,
+ news = news,
+ package = package,
+ tell = tell,
+}
+
+-------------------------------------------------------------------------------
+-- functionality
+-------------------------------------------------------------------------------
+
+--- job_kind -> bool
+local check_job = function (j)
+ return job_kind[j] or die ("invalid job type “%s”.", j)
+end
+
+local parse_argv = function (argv)
+ local job
+ local tgt
+ local pth
+
+ local argc = #arg
+ if argc < 1 or argc > 3 then return "help" end
+ job = arg[1] or "help"
+ if argc > 1 then
+ tgt = arg[2]
+ if argc == 3 then pth = arg[3] end
+ end
+ if not pth then pth = "~/context/tex/texmf-context" end
+ parms.context_root = kpse.expand_path (pth)
+ parms.target = tgt
+ searchdirs [#searchdirs + 1] = pth
+ return job
+end
+
+-------------------------------------------------------------------------------
+-- entry point
+-------------------------------------------------------------------------------
+
+local main = function ()
+ local job = parse_argv (arg)
+ local runner = check_job (job)
+ return runner(arg)
+end
+
+os.exit (main ())
+
+--- vim:ft=lua:ts=2:et:sw=2
diff --git a/Master/texmf-dist/scripts/luaotfload/mkstatus b/Master/texmf-dist/scripts/luaotfload/mkstatus
index 802b2cd4555..f27d57118cb 100755
--- a/Master/texmf-dist/scripts/luaotfload/mkstatus
+++ b/Master/texmf-dist/scripts/luaotfload/mkstatus
@@ -26,6 +26,7 @@ local iosavedata = io.savedata
local iopopen = io.popen
local iowrite = io.write
local lfsisdir = lfs.isdir
+local stringmatch = string.match
-----------------------------------------------------------------------
-- settings
@@ -34,35 +35,95 @@ local lfsisdir = lfs.isdir
local verbose = false
local filelist = "./build/luaotfload-status.lua" --- result
+local srcdir = "src"
+local builddir = "build"
+local scriptdir = "scripts"
+local loaderdir = "src/fontloader"
+local rtdir = "src/fontloader/runtime"
+local miscdir = "src/fontloader/misc"
+
local names = {
- --- only the runtime files and scripts
- { "src", "luaotfload-auxiliary.lua", },
- { "src", "luaotfload-basics-gen.lua", },
- { "src", "luaotfload-basics-nod.lua", },
- { "build", "luaotfload-characters.lua", },
- { "src", "luaotfload-colors.lua", },
- { "src", "luaotfload-database.lua", },
- { "src", "luaotfload-diagnostics.lua", },
- { "src", "luaotfload-features.lua", },
- { "src", "luaotfload-fonts-cbk.lua", },
- { "src", "luaotfload-fonts-def.lua", },
- { "src", "luaotfload-fonts-enc.lua", },
- { "src", "luaotfload-fonts-ext.lua", },
- { "src", "luaotfload-fonts-lua.lua", },
- { "src", "luaotfload-fonts-tfm.lua", },
- { "build", "luaotfload-glyphlist.lua", },
- { "src", "luaotfload-letterspace.lua", },
- { "src", "luaotfload-loaders.lua", },
- { "src", "luaotfload-log.lua", },
- { "src", "luaotfload-main.lua", },
- { "src", "luaotfload-fontloader.lua", },
- { "src", "luaotfload-override.lua", },
- { "src", "luaotfload-parsers.lua", },
- { "src", "luaotfload-tool.lua", },
- { "scripts", "mkcharacters", },
- { "scripts", "mkglyphlist", },
- { "scripts", "mkstatus", },
-}
+
+ --- Luaotfload runtime files
+ { srcdir, "luaotfload-auxiliary.lua", },
+ { srcdir, "luaotfload-colors.lua", },
+ { srcdir, "luaotfload-configuration.lua", },
+ { srcdir, "luaotfload-database.lua", },
+ { srcdir, "luaotfload-diagnostics.lua", },
+ { srcdir, "luaotfload-features.lua", },
+ { srcdir, "luaotfload-init.lua", },
+ { srcdir, "luaotfload-letterspace.lua", },
+ { srcdir, "luaotfload-loaders.lua", },
+ { srcdir, "luaotfload-log.lua", },
+ { srcdir, "luaotfload-main.lua", },
+ { srcdir, "luaotfload-parsers.lua", },
+ { srcdir, "luaotfload-resolvers.lua", },
+ { srcdir, "luaotfload-tool.lua", },
+
+ --- generated files
+ { builddir, "luaotfload-characters.lua", },
+ { builddir, "luaotfload-glyphlist.lua", },
+
+ --- scripts
+ { scriptdir, "mkcharacters", },
+ { scriptdir, "mkglyphlist", },
+ { scriptdir, "mkimport", },
+ { scriptdir, "mkstatus", },
+ { scriptdir, "mktests", },
+
+ --- merge script
+ { loaderdir, "luaotfload-package.lua", },
+
+ --- fontloader runtimes
+ { rtdir, "fontloader-basics-gen.lua", },
+ { rtdir, "fontloader-reference.lua", },
+ { rtdir, "fontloader-tl2014.lua", },
+
+ --- fontloader constituents
+ { miscdir, "fontloader-basics-nod.lua", },
+ { miscdir, "fontloader-data-con.lua", },
+ { miscdir, "fontloader-font-afk.lua", },
+ { miscdir, "fontloader-font-afm.lua", },
+ { miscdir, "fontloader-font-cid.lua", },
+ { miscdir, "fontloader-font-con.lua", },
+ { miscdir, "fontloader-font-def.lua", },
+ { miscdir, "fontloader-font-ini.lua", },
+ { miscdir, "fontloader-font-map.lua", },
+ { miscdir, "fontloader-font-otb.lua", },
+ { miscdir, "fontloader-font-otf.lua", },
+ { miscdir, "fontloader-font-oti.lua", },
+ { miscdir, "fontloader-font-otp.lua", },
+ { miscdir, "fontloader-fonts-cbk.lua", },
+ { miscdir, "fontloader-fonts-def.lua", },
+ { miscdir, "fontloader-fonts-demo-vf-1.lua", },
+ { miscdir, "fontloader-fonts-enc.lua", },
+ { miscdir, "fontloader-fonts-ext.lua", },
+ { miscdir, "fontloader-fonts-inj.lua", },
+ { miscdir, "fontloader-fonts.lua", },
+ { miscdir, "fontloader-fonts-lua.lua", },
+ { miscdir, "fontloader-fonts-ota.lua", },
+ { miscdir, "fontloader-fonts-otn.lua", },
+ { miscdir, "fontloader-fonts-syn.lua", },
+ { miscdir, "fontloader-fonts-tfm.lua", },
+ { miscdir, "fontloader-font-tfm.lua", },
+ { miscdir, "fontloader-languages.lua", },
+ { miscdir, "fontloader-l-boolean.lua", },
+ { miscdir, "fontloader-l-file.lua", },
+ { miscdir, "fontloader-l-function.lua", },
+ { miscdir, "fontloader-l-io.lua", },
+ { miscdir, "fontloader-l-lpeg.lua", },
+ { miscdir, "fontloader-l-lua.lua", },
+ { miscdir, "fontloader-l-math.lua", },
+ { miscdir, "fontloader-l-string.lua", },
+ { miscdir, "fontloader-l-table.lua", },
+ { miscdir, "fontloader-math.lua", },
+ { miscdir, "fontloader-mplib.lua", },
+ { miscdir, "fontloader-preprocessor.lua", },
+ { miscdir, "fontloader-swiglib.lua", },
+ { miscdir, "fontloader-swiglib-test.lua", },
+ { miscdir, "fontloader-util-str.lua", },
+
+} --[[local names]]
-----------------------------------------------------------------------
-- helpers
@@ -121,8 +182,9 @@ end
local hash_all
hash_all = function (list, acc)
- if list == nil then
- return hash_all (table.fastcopy (names), { })
+ if acc == nil then
+ local base = table.fastcopy (names)
+ return hash_all (table.append (base, list), { })
end
local finfo = list[#list]
@@ -156,10 +218,58 @@ hash_all = function (list, acc)
return acc
end
+local handle_argv = function (argv)
+ local ret = { files = { }, loader = nil }
+ local argc = #argv
+ if argc < 1 then return ret end
+ local argoff = 1
+ if argv [1] == "-v" then
+ verbose = true
+ if argc == 1 then return ret end
+ argoff = 2
+ end
+ local aux aux = function (acc, i)
+ if i > argc then return acc else
+ local cur = argv[i]
+ if type (cur) == "string" then
+ local loader = stringmatch (cur, "--fontloader=(.+)$")
+ if loader then
+ cur = loader
+ acc.loader = file.basename (cur)
+ end
+ if lfs.isfile (cur) then
+ local files = acc.files
+ files[#files + 1] = cur
+ end
+ else
+ die ("file not found: %s", tostring (cur))
+ end
+ return aux (acc, i + 1)
+ end
+ end
+ return aux (ret, argoff)
+end
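A hedged example of how the new options would be passed (file names are placeholders):

    -- Assumed invocation:
    --   texlua mkstatus -v --fontloader=src/fontloader/runtime/fontloader-reference.lua
    -- "-v" turns on verbose hashing; a --fontloader= argument records the
    -- loader's basename in the notes and, if the path exists, hashes that
    -- file along with any other extra file arguments.
    local loader = string.match ("--fontloader=src/fontloader/runtime/fontloader-reference.lua",
                                 "--fontloader=(.+)$")
    assert (loader == "src/fontloader/runtime/fontloader-reference.lua")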
+
+local add_files
+add_files = function (lst, acc)
+ if lst == nil then return end
+ if acc == nil then return add_files (lst, { }) end
+ local len = #lst
+ if len == 0 then return acc end
+ local cur = lst[len]
+ local fname = file.basename (cur)
+ local path = file.dirname (cur)
+ acc[#acc + 1] = { path, fname }
+ lst[len] = nil
+ return add_files (lst, acc)
+end
+
local main = function ()
- if arg [1] == "-v" then verbose = true end
- local hashes = hash_all ()
+ local raw_extra = handle_argv (arg)
+ local cuit_extra = add_files (raw_extra.files)
+ local hashes = hash_all (cuit_extra)
local notes = git_info ()
+ notes.loader = raw_extra.loader
local serialized = table.serialize ({ notes = notes,
hashes = hashes }, true)
local success = io.savedata (filelist, serialized)
diff --git a/Master/texmf-dist/scripts/luaotfload/mktests b/Master/texmf-dist/scripts/luaotfload/mktests
new file mode 100755
index 00000000000..ad8c4f5c594
--- /dev/null
+++ b/Master/texmf-dist/scripts/luaotfload/mktests
@@ -0,0 +1,362 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: mktests
+-- USAGE: ./mktests
+-- DESCRIPTION: test the behavior of Luaotfload
+-- REQUIREMENTS: Luatex > 0.76, Luaotfload
+-- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
+-----------------------------------------------------------------------
+--
+--===================================================================--
+-- NOTE
+-- this is a stub, to be completed long-term
+-- suggestions welcome
+--===================================================================--
+
+
+local tests = { }
+local lpeg = require "lpeg"
+local lpegmatch = lpeg.match
+
+config = { luaotfload = { } }
+luatexbase = { }
+
+kpse.set_program_name "luatex"
+
+require "lualibs"
+require "luaotfload-basics-gen.lua"
+require "luaotfload-log.lua"
+
+fonts = { names = { } } -- for db; normally provided by the fontloaders
+
+local require_init = { }
+
+local loadmodule = function (name)
+ local v = require ("luaotfload-" .. name)
+ if v then
+ local mod = { }
+ local tv = type (v)
+ if tv == "table" then
+ mod.name = name
+ mod.init = v.init
+ require_init [#require_init + 1] = mod
+ elseif tv == "function" then
+ mod.name = name
+ mod.init = v
+ require_init [#require_init + 1] = mod
+ end
+ end
+end
+
+require "alt_getopt"
+
+loadmodule "log.lua" --- this populates the luaotfload.log.* namespace
+loadmodule "parsers" --- fonts.conf, configuration, and request syntax
+loadmodule "configuration" --- configuration file handling
+loadmodule "database"
+loadmodule "resolvers" --- Font lookup
+
+do --- init_modules
+ --- NB we don’t command the logger at this point.
+ local todo = #require_init
+ local ret = true
+ for i = 1, todo do
+ local mod = require_init[i]
+ local name = mod.name
+ local init = mod.init
+ if type (init) ~= "function" then
+ error ("luaotfload broken; module "
+ .. name .. " missing initializers!")
+ end
+ local v = mod.init ()
+ if v == true then
+ --- evaluated well
+ elseif type (v) == "table" then
+ luaotfload[name] = v
+ else
+ error ("luaotfload broken; initialization of module "
+ .. name .. " returned " .. tostring (v) .. ".")
+ return false
+ end
+ end
+end
+
+local names = fonts.names
+
+-----------------------------------------------------------------------
+--- helper functions
+-----------------------------------------------------------------------
+
+local pprint_resolve = function (input, output, result)
+ texio.write_nl (string.format ("[%s] “%s” -> “%s”",
+ result == true and "passed" or "failed",
+ input,
+ output))
+end
+
+local pprint_result = function (name, failed, total)
+ if failed == 0 then
+ texio.write_nl (string.format ("[%s] all %d passed", name, total))
+ else
+ texio.write_nl (string.format ("[%s] %d of %d failed",
+ name,
+ failed,
+ total))
+ end
+end
+
+local pprint_spec = function (spec)
+ return string.format ("%s/%s*%.2fpt",
+ spec.specification,
+ spec.style or "regular",
+ spec.optsize or 0)
+end
+
+-----------------------------------------------------------------------
+--- parser tests
+-----------------------------------------------------------------------
+
+local test_config_input = [==[
+
+
+[foo]
+bar = baz
+xyzzy = no
+buzz
+
+[lavernica "brutalitops"]
+# It’s a locomotive that runs on us.
+ laan-ev = zip zop zooey ; jib-jab
+Crouton = "Fibrosis \"\\ # "
+
+]==]
+
+local test_config_output = {
+ { section = { title = "foo" },
+ variables = { bar = "baz",
+ xyzzy = false,
+ buzz = true } },
+ { section = { title = "lavernica",
+ subtitle = "brutalitops" },
+ variables = { ["laan-ev"] = "zip zop zooey",
+ crouton = "Fibrosis \"\\ # " } }
+}
+
+local parse_config = function ()
+ local parser = luaotfload.parsers.config
+ local result = lpegmatch (parser, test_config_input)
+ --- compare values recursively
+ local aux aux = function (t1, t2)
+ --- cheaply non-tail recursive
+ local k1 = table.keys (t1)
+ local k2 = table.keys (t2)
+ if #k1 ~= #k2 then
+ return false
+ end
+ for i = 1, #k1 do
+ local k = k1[i]
+ local v1 = t1[k]
+ local v2 = t2[k]
+ if type (v1) == "table" then
+ if type (v2) ~= "table" or not aux (v1, v2) then
+ return false
+ end
+ elseif v1 ~= v2 then
+ return false
+ end
+ end
+ return true
+ end
+ return aux (result, test_config_output) and 0 or 1, 1
+end
+
+tests["parse_config"] = parse_config
+
+-----------------------------------------------------------------------
+--- font tests
+-----------------------------------------------------------------------
+
+--- test sets
+
+local infer_regular_style = {
+ --- inferring which one is the correct style for “regular”; can be
+ --- obscured by synonyms like “book” etc.
+ { "Iwona", "Iwona-Regular.otf" }, -- trivial case
+ { "DejaVu Serif", "DejaVuSerif.ttf" },
+ { "DejaVu Sans", "DejaVuSans.ttf" },
+ { "Adobe Garamond Pro", "agaramondpro_regular.otf" },
+ { "Garamond Premier Pro", "GaramondPremrPro.otf" },
+ { "CMU Serif", "cmunrm.otf" },
+ { "CMU Sans Serif", "cmunss.otf" },
+ { "Minion Pro", "MinionPro-Regular.otf" },
+ --- Below test will succeed only if we match for the
+ --- splainname (= sanitized tfmdata.fullname) field
+ --- explicitly.
+ { "Minion Pro Italic", "MinionPro-It.otf" },
+}
+
+local choose_optical_size = {
+ { { name = "Latin Modern Roman", optsize = 1 }, "lmroman5-regular.otf" },
+ { { name = "Latin Modern Roman", optsize = 10 }, "lmroman10-regular.otf" },
+ { { name = "Latin Modern Roman", optsize = 12 }, "lmroman12-regular.otf" },
+ { { name = "Latin Modern Roman", optsize = 42 }, "lmroman17-regular.otf" },
+ { { name = "EB Garamond", optsize = 1 }, "EBGaramond08-Regular.otf" },
+ { { name = "EB Garamond", optsize = 8 }, "EBGaramond08-Regular.otf" },
+ { { name = "EB Garamond", optsize = 12 }, "EBGaramond12-Regular.otf" },
+ { { name = "EB Garamond", optsize = 42 }, "EBGaramond12-Regular.otf" },
+ { { name = "Garamond Premier Pro", optsize = 1 }, "GaramondPremrPro-Capt.otf" },
+ { { name = "Garamond Premier Pro", optsize = 10 }, "GaramondPremrPro.otf" },
+ { { name = "Garamond Premier Pro", optsize = 15 }, "GaramondPremrPro-Subh.otf" },
+ { { name = "Garamond Premier Pro", optsize = 42 }, "GaramondPremrPro-Disp.otf" },
+}
+
+local choose_style = {
+ { { name = "DejaVu Sans", style = "regular" }, "DejaVuSans.ttf" },
+ { { name = "DejaVu Sans", style = "italic" }, "DejaVuSans-Oblique.ttf" },
+ { { name = "DejaVu Sans", style = "bold" }, "DejaVuSans-Bold.ttf" },
+ { { name = "DejaVu Sans", style = "bolditalic" }, "DejaVuSans-BoldOblique.ttf" },
+ { { name = "Linux Libertine O", style = "regular" }, "LinLibertine_R.otf" },
+ { { name = "Linux Libertine O", style = "italic" }, "LinLibertine_RI.otf" },
+ { { name = "Linux Libertine O", style = "bold" }, "LinLibertine_RB.otf" },
+ { { name = "Linux Libertine O", style = "bolditalic" }, "LinLibertine_RBI.otf" },
+ { { name = "Liberation Serif", style = "regular" }, "LiberationSerif-Regular.ttf" },
+ { { name = "Liberation Serif", style = "italic" }, "LiberationSerif-Italic.ttf" },
+ { { name = "Liberation Serif", style = "bold" }, "LiberationSerif-Bold.ttf" },
+ { { name = "Liberation Serif", style = "bolditalic" }, "LiberationSerif-BoldItalic.ttf" },
+ { { name = "CMU Sans Serif", style = "regular" }, "cmunss.otf" }, -- no “regular” but “medium”
+ { { name = "CMU Sans Serif", style = "italic" }, "cmunsi.otf" }, -- no “italic” but “oblique”
+ { { name = "CMU Sans Serif", style = "bold" }, "cmunsx.otf" },
+ { { name = "CMU Sans Serif", style = "bolditalic" }, "cmunso.otf" },
+ --[[--
+ Minion Pro Italic is exceptionally weird regarding identifiers in
+ that the postscript fontname and both info.fontname and
+ info.fullname are given as “minionproit”. Now its english fullname
+ (field 18) is “minionproital”. Only the value “fullname” in the root of
+ the tfmdata structure (not the one returned by fontloader.info()!)
+ accurately yields “Minion Pro Italic”.
+
+ To complete the picture, the file naming isn’t very consistent either:
+ we find the suffixes “Regular” and “Bold”, but “It” and “BoldIt”. What
+ the hell were the designers smoking?
+
+ Also, the full Minion Pro set comes with different optical sizes which
+  for monetary reasons cannot be considered here.
+ --]]--
+ { { name = "Minion Pro", style = "regular" }, "MinionPro-Regular.otf" },
+ { { name = "Minion Pro", style = "italic" }, "MinionPro-It.otf" },
+ { { name = "Minion Pro", style = "bold" }, "MinionPro-Bold.otf" },
+ { { name = "Minion Pro", style = "bolditalic" }, "MinionPro-BoldIt.otf" },
+}
+
+--- this needs a database built with --formats=+pfa,pfb,afm
+
+local resolve_t1_font = {
+ { { name = "URW Gothic L", style = "regular" }, "a010013l.pfb" }, --> “book”
+-- { { name = "URW Gothic L", style = "italic" }, "a010033l.pfb" }, --> “book oblique”
+-- { { name = "URW Gothic L", style = "bold" }, "a010015l.pfb" }, --> “demi”
+-- { { name = "URW Gothic L", style = "bolditalic" }, "a010035l.pfb" }, --> “demi oblique”
+ { { name = "Century Schoolbook L", style = "regular" }, "c059013l.pfb" },
+ { { name = "Century Schoolbook L", style = "italic" }, "c059033l.pfb" },
+ { { name = "Century Schoolbook L", style = "bold" }, "c059016l.pfb" },
+ { { name = "Century Schoolbook L", style = "bolditalic" }, "c059036l.pfb" },
+ { { name = "Nimbus Roman No9 L", style = "regular" }, "n021003l.pfb" },
+ { { name = "Nimbus Roman No9 L", style = "italic" }, "n021023l.pfb" },
+ { { name = "Nimbus Roman No9 L", style = "bold" }, "n021004l.pfb" }, --- medium, actually
+ { { name = "Nimbus Roman No9 L", style = "bolditalic" }, "n021024l.pfb" },
+}
+
+local translate_style = {
+ regular = "r",
+ italic = "i",
+ bold = "b",
+ bolditalic = "bi",
+}
+
+local font_name_tests = {
+ infer_regular_style,
+ choose_optical_size,
+ choose_style,
+ resolve_t1_font,
+}
+
+local default_spec = {
+ name = false,
+ lookup = "name",
+ specification = false,
+ optsize = 0,
+}
+
+local resolve_font_name = function ()
+ local failed, total = 0, 0
+ for nset = 1, #font_name_tests do
+ local set = font_name_tests[nset]
+
+ for ntest = 1, #set do
+ local test = set[ntest]
+ local input, output = test[1], test[2]
+
+ if type (input) == "string" then
+ local input_spec = table.copy (default_spec)
+ input_spec.name = input
+ input_spec.specification = input_spec.lookup .. ":" .. input
+ local result = fonts.names.lookup_font_name (input_spec) == output
+ total = total + 1
+ if not result then
+ failed = failed + 1
+ end
+ pprint_resolve (input, output, result)
+
+ else
+ local input_spec, output = test[1], test[2]
+ input_spec.specification = (input_spec.lookup
+ or default_spec.lookup)
+ .. ":" .. input_spec.name
+ input_spec.optsize = input_spec.optsize or default_spec.optsize
+ input_spec.style = translate_style [input_spec.style]
+ local result = fonts.names.lookup_font_name (input_spec) == output
+ total = total + 1
+ if not result then
+ failed = failed + 1
+ end
+ pprint_resolve (pprint_spec (input_spec), output, result)
+ end
+
+ end
+ end
+ return failed, total
+end
+
+tests ["resolve_font_name"] = resolve_font_name
+
+
+-----------------------------------------------------------------------
+--- runner
+-----------------------------------------------------------------------
+
+local main = function ()
+ local failed, total = 0, 0
+ config.actions.apply_defaults ()
+ for name, test in next, tests do
+ texio.write_nl ("[" .. name .. "]")
+ local newfailed, newtotal = test ()
+ pprint_result (name, newfailed, newtotal)
+ failed = failed + newfailed
+ total = total + newtotal
+ end
+
+ if failed == 0 then
+ texio.write_nl (string.format ("[report] all %d tests passed.", total))
+ else
+ texio.write_nl (string.format ("[report] %d of %d tests failed (%d %%).",
+ failed,
+ total,
+ failed / total * 100))
+ end
+ texio.write_nl ""
+ os.exit (0)
+end
+
+return main ()
+
+--- vim:ft=lua:ts=2:et:sw=2
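Editor's note: to make the test tables above concrete, here is a minimal sketch of what a single entry of the choose_style table amounts to when resolved by hand. It assumes the same environment mktests itself runs in (texlua with luaotfload initialized and the name database built); it is an illustration, not part of the test suite.

    -- Sketch: resolve one styled request the way resolve_font_name does.
    local spec = {
      name    = "DejaVu Sans",
      lookup  = "name",
      style   = translate_style ["bold"],   -- "b"
      optsize = 0,
    }
    spec.specification = spec.lookup .. ":" .. spec.name
    local file = fonts.names.lookup_font_name (spec)
    -- per the choose_style table this should yield "DejaVuSans-Bold.ttf"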
diff --git a/Master/texmf-dist/source/luatex/luaotfload/Makefile b/Master/texmf-dist/source/luatex/luaotfload/Makefile
index 4124a1a49b3..f23c6b84c23 100644
--- a/Master/texmf-dist/source/luatex/luaotfload/Makefile
+++ b/Master/texmf-dist/source/luatex/luaotfload/Makefile
@@ -5,10 +5,14 @@ NAME = luaotfload
DOCSRCDIR = ./doc
SCRIPTSRCDIR = ./scripts
SRCSRCDIR = ./src
+FONTLOADERDIR = $(SRCSRCDIR)/fontloader/runtime
+FONTLOADERSRCDIR= $(SRCSRCDIR)/fontloader/misc
+PACKAGEDIR = $(SRCSRCDIR)/fontloader
BUILDDIR = ./build
MISCDIR = ./misc
SRC = $(wildcard $(SRCSRCDIR)/luaotfload-*.lua)
+SRC += $(wildcard $(FONTLOADERDIR)/*.lua)
SRC += $(SRCSRCDIR)/luaotfload.sty
SRC += $(MISCDIR)/luaotfload-blacklist.cnf
@@ -18,10 +22,13 @@ CONFDEMO = $(MISCDIR)/luaotfload.conf.example
GLYPHSCRIPT = $(SCRIPTSRCDIR)/mkglyphlist
CHARSCRIPT = $(SCRIPTSRCDIR)/mkcharacters
STATUSSCRIPT = $(SCRIPTSRCDIR)/mkstatus
+IMPORTSCRIPT = $(SCRIPTSRCDIR)/mkimport
+TESTSCRIPT = $(SCRIPTSRCDIR)/mktests
GLYPHSOURCE = $(BUILDDIR)/glyphlist.txt
RESOURCESCRIPTS = $(GLYPHSCRIPT) $(CHARSCRIPT) $(STATUSSCRIPT)
+RESOURCESCRIPTS+= $(IMPORTSCRIPT) $(TESTSCRIPT)
TOOLNAME = luaotfload-tool
TOOL = $(SRCSRCDIR)/$(TOOLNAME).lua
@@ -30,6 +37,9 @@ CONFNAME = luaotfload.conf
GRAPH = filegraph
DOCSRC = $(addprefix $(DOCSRCDIR)/$(NAME), -main.tex -latex.tex)
+LOADERSRC = $(wildcard $(FONTLOADERSRCDIR)/*.lua)
+LOADERSRC += $(wildcard $(FONTLOADERSRCDIR)/*.tex)
+LOADERSRC += $(PACKAGEDIR)/luaotfload-package.lua
GRAPHSRC = $(DOCSRCDIR)/$(GRAPH).dot
MANSRC = $(DOCSRCDIR)/$(TOOLNAME).rst $(DOCSRCDIR)/$(CONFNAME).rst
@@ -45,8 +55,10 @@ DOCS = $(DOCPDF) $(DOTPDF) $(MANPAGES)
GLYPHS = $(BUILDDIR)/$(NAME)-glyphlist.lua
CHARS = $(BUILDDIR)/$(NAME)-characters.lua
STATUS = $(BUILDDIR)/$(NAME)-status.lua
-RESOURCES = $(GLYPHS) $(CHARS) $(STATUS)
-SOURCE = $(DOCSRC) $(MANSRC) $(SRC) README COPYING Makefile NEWS $(RESOURCESCRIPTS)
+LOADER = $(BUILDDIR)/fontloader-$(shell date +%F).lua
+RESOURCES = $(GLYPHS) $(CHARS) $(LOADER) $(STATUS)
+SOURCE = $(DOCSRC) $(LOADERSRC) $(MANSRC)
+SOURCE += $(SRC) README COPYING Makefile NEWS $(RESOURCESCRIPTS)
# Files grouped by installation location
SCRIPTSTATUS = $(TOOL) $(RESOURCESCRIPTS)
@@ -85,7 +97,10 @@ LUA = texlua
## variables.
DO_GLYPHS = $(LUA) $(GLYPHSCRIPT) > /dev/null
DO_CHARS = $(LUA) $(CHARSCRIPT) > /dev/null
-DO_STATUS = $(LUA) $(STATUSSCRIPT) > /dev/null
+DO_STATUS = $(LUA) $(STATUSSCRIPT) --fontloader=$(LOADER) >/dev/null
+DO_IMPORT = $(LUA) $(IMPORTSCRIPT) import >/dev/null
+DO_PACKAGE = $(LUA) $(IMPORTSCRIPT) package \
+ $(PACKAGEDIR)/luaotfload-package.lua $(LOADER) >/dev/null
define check-lua-files
@echo validating syntax
@@ -109,8 +124,12 @@ builddir: $(BUILDDIR)
resources: $(RESOURCES)
chars: $(CHARS)
status: $(STATUS)
+package: loader
+loader: $(LOADER)
ctan: $(CTAN_ZIP)
tds: $(TDS_ZIP)
+import:
+ $(DO_IMPORT)
graph: $(DOTPDF)
doc: $(DOCS)
@@ -132,9 +151,12 @@ $(GLYPHS): builddir
$(CHARS): builddir
$(DO_CHARS)
-$(STATUS): builddir
+$(STATUS): builddir loader
$(DO_STATUS)
+$(LOADER): builddir
+ $(DO_PACKAGE)
+
$(BUILDDIR): /dev/null
mkdir -p $(BUILDDIR)
@@ -147,7 +169,7 @@ $(CTAN_ZIP): $(DOCS) $(SOURCE) $(TDS_ZIP)
@echo "Making $@ for CTAN upload."
@$(RM) -- $@
$(make-ctandir)
- cd $(BUILDDIR) && zip -r -9 $(CTAN_ZIPFILE) $(TDS_ZIPFILE) $(NAME) >/dev/null
+ @cd $(BUILDDIR) && zip -r -9 $(CTAN_ZIPFILE) $(TDS_ZIPFILE) $(NAME) >/dev/null
$(CTAN_ZIPSIG): $(CTAN_ZIP)
@echo "Signing package $(CTAN_ZIP)"
@@ -155,8 +177,8 @@ $(CTAN_ZIPSIG): $(CTAN_ZIP)
@gpg --batch --armor --detach-sign "$(CTAN_ZIP)"
define run-install-doc
-@mkdir -p $(DOCDIR) && cp -- $(DOCSTATUS) $(VGND) $(CONFDEMO) $(DOCDIR)
-@mkdir -p $(SRCDIR) && cp -- $(SRCSTATUS) $(SRCDIR)
+@mkdir -p $(DOCDIR) && cp -- $(DOCSTATUS) $(VGND) $(CONFDEMO) $(DOCDIR)
+@mkdir -p $(SRCDIR) && cp -- $(SRCSTATUS) $(SRCDIR)
@mkdir -p $(MAN1DIR) && cp -- $(TOOLMAN) $(MAN1DIR)
@mkdir -p $(MAN5DIR) && cp -- $(CONFMAN) $(MAN5DIR)
endef
@@ -164,6 +186,7 @@ endef
define run-install
@mkdir -p $(SCRIPTDIR) && cp -- $(SCRIPTSTATUS) $(SCRIPTDIR)
@mkdir -p $(RUNDIR) && cp -- $(RESOURCES) $(RUNSTATUS) $(RUNDIR)
+@mkdir -p $(RUNDIR) && cp -- $(LOADERSRC) $(RUNDIR)
endef
$(TDS_ZIP): TEXMFROOT=./tmp-texmf
@@ -177,12 +200,16 @@ $(TDS_ZIP): $(DOCS) $(ALL_STATUS) check
sign: $(CTAN_ZIPSIG)
-.PHONY: install manifest clean mrproper show showtargets check
+.PHONY: install manifest clean mrproper show showtargets check import news
-install: $(ALL_STATUS)
- @echo "Installing in '$(TEXMFROOT)'."
- $(run-install-docs)
- $(run-install)
+install:
+ @echo " ××××××××××××××××××××××××××××××××"
+ @echo " There is no “install” target."
+ @echo " ××××××××××××××××××××××××××××××××"
+ @echo " Compile a TDS zipball (make tds)"
+ @echo " and extract that into your local"
+ @echo " TEXMF instead."
+ @echo " ××××××××××××××××××××××××××××××××"
manifest:
@echo "Source files:"
@@ -218,12 +245,19 @@ showtargets:
@echo " luaotfload.conf(5) (requires Docutils)"
@echo " graph generate file graph (requires GraphViz)"
@echo
+ @echo " loader merge fontloader"
@echo " chars import char-def.lua as luaotfload-characters.lua"
@echo " status create repository info (luaotfload-status.lua)"
@echo
+ @echo " import grab files from upstream"
+ @echo " package package fontloader"
+ @echo
@echo " tds package a zipball according to the TDS"
@echo " ctan package a zipball for uploading to CTAN"
@echo " sign sign zipball"
@echo
+ @echo " clean cleanup side-effects"
+ @echo " mrproper cleanup side-effects as well as make targets"
+ @echo
# vim:noexpandtab:tabstop=8:shiftwidth=2
diff --git a/Master/texmf-dist/source/luatex/luaotfload/filegraph.dot b/Master/texmf-dist/source/luatex/luaotfload/filegraph.dot
index 47db9ea3fcc..9773ebd426f 100644
--- a/Master/texmf-dist/source/luatex/luaotfload/filegraph.dot
+++ b/Master/texmf-dist/source/luatex/luaotfload/filegraph.dot
@@ -10,8 +10,8 @@ strict digraph luaotfload_files { //looks weird with circo ...
size = "21cm";
rankdir = LR;
- ranksep = 0.618;
- nodesep = 1.618;
+ ranksep = 0.618;
+ nodesep = 1.618;
edge [
arrowhead = onormal,
@@ -30,37 +30,38 @@ strict digraph luaotfload_files { //looks weird with circo ...
fontdbutil -> font_names [label="--update",
style=dashed]
- luaotfload -> otfl_fonts_merged [label="merged"]
- luaotfload -> merged_lua_libs [label="unmerged", style=solid]
- luaotfload -> merged_luatex_fonts [label="unmerged", style=solid]
- luaotfload -> merged_context_libs [label="unmerged", style=solid]
+ luaotfload -> merged_lua_libs [style=solid]
+ luaotfload -> luaotfload_init [label="main()", style=solid]
+ luaotfload -> luaotfload_libs [label="main()", style=solid]
- luaotfload -> luaotfload_libs
- luaotfload -> otfl_blacklist_cnf
-
- otfl_fonts_merged -> merged_lua_libs [label="merged",
- style=dotted,
- lhead=cluster_merged]
- otfl_fonts_merged -> merged_luatex_fonts [label="merged",
+ fontloader -> merged_luatex_fonts [label="merged",
style=dotted,
lhead=cluster_merged]
- otfl_fonts_merged -> merged_context_libs [label="merged",
+ fontloader -> merged_context_libs [label="merged",
style=dotted,
lhead=cluster_merged]
+ luaotfload_init -> luaotfload_log [label="init_early()", style=solid]
+ luaotfload_init -> luaotfload_basics_gen [label="init_early()", style=solid]
+ luaotfload_init -> fontloader [label="init_main()", style=solid]
+
+ luaotfload_init -> merged_luatex_fonts [label="unmerged", style=solid]
+ luaotfload_init -> merged_context_libs [label="unmerged", style=solid]
+
merged_luatex_fonts -> font_age [label="luatex-fonts-enc.lua",
ltail=cluster_merged]
fontdbutil -> fontdbutil_diagnostics [label="--diagnose"]
-
fontdbutil -> status [label="version information"]
- fontdbutil_diagnostics -> status [constraint=no, label="hash files"]
+ luaotfload_package -> fontloader [label="merges", style=dashed]
merged_luatex_fonts -> characters [label="luaotfload-auxiliary.lua",
ltail=cluster_merged]
- luaotfload_libs -> font_names [label="luaotfload-database.lua"]
+ luaotfload_libs -> font_names [label="luaotfload-database.lua"]
+ luaotfload_libs -> otfl_blacklist_cnf [label="luaotfload-database.lua"]
+
mkstatus -> status [label="generates from distribution files",
style=dashed]
@@ -71,13 +72,30 @@ strict digraph luaotfload_files { //looks weird with circo ...
mkcharacters -> characters [label="generates from Context’s char-def.lua",
style=dashed]
+ fontdbutil_diagnostics -> status [label="hash files"]
+
+ mkimport -> merged_luatex_fonts [label="pulls", style=dashed, constraint=no];
+ mkimport -> merged_context_libs [label="pulls", style=dashed, constraint=no];
+
subgraph { rank = same;
- mkcharacters;
- mkglyphlist;
- mkstatus;
fontdbutil;
luaotfload }
+ subgraph cluster_scripts {
+ node [style=filled, color=white];
+ style = "filled,rounded";
+ color = "#44000011:#CCCCCC77";
+ //nodesep = "3.0";
+ rank = same;
+ label = "Standalone scripts";
+ gradientangle=90;
+ mkcharacters;
+ mkglyphlist;
+ mkimport;
+ mktest;
+ mkstatus;
+ }
+
/* ····································································
* main files
* ································································· */
@@ -90,6 +108,14 @@ strict digraph luaotfload_files { //looks weird with circo ...
style = "filled,rounded",
penwidth=2]
+ luaotfload_package [label = "luaotfload-package.lua",
+ shape = rect,
+ width = "3.2cm",
+ height = "1.2cm",
+ color = "#01012222",
+ style = "filled,rounded",
+ penwidth=2]
+
fontdbutil_diagnostics [label = "luaotfload-diagnostics.lua",
shape = rect,
width = "3.2cm",
@@ -98,27 +124,43 @@ strict digraph luaotfload_files { //looks weird with circo ...
style = "filled,rounded",
penwidth=2]
+ mktest [label = "mktest",
+ shape = rect,
+ width = "3.2cm",
+ height = "0.618cm",
+ color = "#FFFFFF66",
+ style = "filled,rounded",
+ penwidth=2]
+
+ mkimport [label = "mkimport",
+ shape = rect,
+ width = "3.2cm",
+ height = "0.618cm",
+ color = "#FFFFFF66",
+ style = "filled,rounded",
+ penwidth=2]
+
mkstatus [label = "mkstatus",
shape = rect,
width = "3.2cm",
- height = "1.2cm",
- color = "#01012222",
+ height = "0.618cm",
+ color = "#FFFFFF66",
style = "filled,rounded",
penwidth=2]
mkglyphlist [label = "mkglyphlist",
shape = rect,
width = "3.2cm",
- height = "1.2cm",
- color = "#01012222",
+ height = "0.618cm",
+ color = "#FFFFFF66",
style = "filled,rounded",
penwidth=2]
mkcharacters [label = "mkcharacters",
shape = rect,
width = "3.2cm",
- height = "1.2cm",
- color = "#01012222",
+ height = "0.618cm",
+ color = "#FFFFFF66",
style = "filled,rounded",
penwidth=2]
@@ -138,13 +180,37 @@ strict digraph luaotfload_files { //looks weird with circo ...
* style = "filled,rounded",
* penwidth=2]
*/
- otfl_fonts_merged [label = "luaotfload-fontloader.lua",
- shape = rect,
- width = "3.2cm",
- height = "1.2cm",
- color = "#01012222",
- style = "filled,rounded",
- penwidth=2]
+ luaotfload_init [label = "luaotfload-init.lua",
+ shape = rect,
+ width = "3.2cm",
+ height = "1.2cm",
+ color = "#44440122",
+ style = "filled,rounded",
+ penwidth=2]
+
+ luaotfload_log [label = "luaotfload-log.lua",
+ shape = rect,
+ width = "3.2cm",
+ height = "1.2cm",
+ color = "#44440122",
+ style = "filled,rounded",
+ penwidth=2]
+
+ luaotfload_basics_gen [label = "fontloader-basics-gen.lua",
+ shape = rect,
+ width = "3.2cm",
+ height = "1.2cm",
+ color = "#FFFFFF66",
+ style = "filled,rounded",
+ penwidth=2]
+
+ fontloader [label = "fontloader-YY-MM-DD.lua",
+ shape = rect,
+ width = "3.2cm",
+ height = "1.2cm",
+ color = "#FFFFFF66",
+ style = "filled,rounded",
+ penwidth=2]
/* ····································································
* luaotfload files
@@ -193,16 +259,16 @@ strict digraph luaotfload_files { //looks weird with circo ...
luaotfload_libs [
shape = box,
style = "filled,rounded",
- color = "grey90:goldenrod4",
+ color = "grey90",
fontsize = 10,
label = <
<table cellborder="0" bgcolor="#FFFFFFAA">
<th> <td colspan="2"> <font point-size="12" face="Iwona Italic">Luaotfload Libraries</font> </td> </th>
- <tr> <td>luaotfload-auxiliary.lua</td> <td>luaotfload-features.lua</td> </tr>
- <tr> <td>luaotfload-override.lua</td> <td>luaotfload-loaders.lua</td> </tr>
- <tr> <td>luaotfload-log.lua</td> <td>luaotfload-letterspace.lua</td> </tr>
- <tr> <td>luaotfload-parsers.lua</td> <td>luaotfload-database.lua</td> </tr>
- <tr> <td>luaotfload-color.lua</td> </tr>
+ <tr> <td>luaotfload-auxiliary.lua</td> <td>luaotfload-features.lua</td> </tr>
+ <tr> <td>luaotfload-loaders.lua</td> <td>luaotfload-colors.lua</td> </tr>
+ <tr> <td>luaotfload-resolvers.lua</td> <td>luaotfload-letterspace.lua</td> </tr>
+ <tr> <td>luaotfload-parsers.lua</td> <td>luaotfload-database.lua</td> </tr>
+ <tr> <td>luaotfload-configuration.lua</td><td></td> </tr>
</table>
>,
]
@@ -214,28 +280,40 @@ strict digraph luaotfload_files { //looks weird with circo ...
subgraph cluster_merged {
node [style=filled, color=white];
style = "filled,rounded";
- color = "grey90:dodgerblue4";
+ color = "#912CEE33";
//nodesep = "3.0";
rank = same;
label = "Merged Libraries";
gradientangle=0;
- merged_lua_libs;
merged_luatex_fonts;
merged_context_libs;
}
- otfl_fonts_merged -> merged_lua_libs
- otfl_fonts_merged -> merged_luatex_fonts
- otfl_fonts_merged -> merged_context_libs
+ fontloader -> merged_luatex_fonts
+ fontloader -> merged_context_libs
+
+ subgraph cluster_fontloader {
+ node [style=filled, color=white];
+ style = "filled,rounded";
+ color = "bisque";
+ //nodesep = "3.0";
+ rank = same;
+ label = "Fontloader";
+ gradientangle=0;
+ luaotfload_basics_gen;
+ fontloader;
+ }
merged_lua_libs [
shape = box,
style = "filled,rounded",
- color = "#FFFFFFAA",
+ color = "#CCCC1166",
+ fontsize = 10,
fontsize = 10,
label = <
<table border="0">
- <th> <td colspan="3"> <font point-size="12" face="Iwona Italic">Lua Libraries from Context</font> </td> </th>
+ <th> <td colspan="3"> <font point-size="12" face="Iwona Italic">
+ Lualibs &ndash; Lua Libraries from Context</font> </td> </th>
<tr> <td>l-lua.lua</td> <td>l-lpeg.lua</td> <td>l-function.lua</td> </tr>
<tr> <td>l-string.lua</td> <td>l-table.lua</td> <td>l-io.lua</td> </tr>
<tr> <td>l-file.lua</td> <td>l-boolean.lua</td> <td>l-math.lua</td> </tr>
@@ -252,17 +330,13 @@ strict digraph luaotfload_files { //looks weird with circo ...
label = <
<table border="0">
<th> <td colspan="2"> <font point-size="12" face="Iwona Italic">Font Loader (LuaTeX-Fonts)</font> </td> </th>
- <tr> <td>luatex-basics-gen.lua</td> <td>luatex-basics-nod.lua</td> </tr>
+ <tr> <td>luatex-fonts-cbk.lua</td> <td>luatex-basics-nod.lua</td> </tr>
<tr> <td>luatex-fonts-enc.lua</td> <td>luatex-fonts-syn.lua</td> </tr>
<tr> <td>luatex-font-tfm.lua</td> <td>luatex-font-afm.lua</td> </tr>
<tr> <td>luatex-font-afk.lua</td> <td>luatex-fonts-tfm.lua</td> </tr>
<tr> <td>luatex-fonts-chr.lua</td> <td>luatex-fonts-lua.lua</td> </tr>
<tr> <td>luatex-fonts-inj.lua</td> <td>luatex-fonts-otn.lua</td> </tr>
<tr> <td>luatex-fonts-def.lua</td> <td>luatex-fonts-ext.lua</td> </tr>
- <tr> <td>luatex-fonts-cbk.lua</td> </tr>
-
-
-
</table>
>,
]
diff --git a/Master/texmf-dist/source/luatex/luaotfload/luaotfload-latex.tex b/Master/texmf-dist/source/luatex/luaotfload/luaotfload-latex.tex
index 34c494d58f7..bdcf45f0929 100644
--- a/Master/texmf-dist/source/luatex/luaotfload/luaotfload-latex.tex
+++ b/Master/texmf-dist/source/luatex/luaotfload/luaotfload-latex.tex
@@ -1,5 +1,5 @@
-\luatexsuppresslongerror1%% sigh ...
-%% Copyright (C) 2009-2014
+\suppresslongerror1%% sigh ...
+%% Copyright (C) 2009-2015
%%
%% by Elie Roux <elie.roux@telecom-bretagne.eu>
%% and Khaled Hosny <khaledhosny@eglug.org>
@@ -129,7 +129,7 @@
\definehighlight [fileent][\ttfamily\restoreunderscore] %% files, dirs
\definehighlight [texmacro][\sffamily\itshape\textbackslash] %% cs
-\definehighlight [luafunction][\sffamily\itshape\restoreunderscore] %% lua identifiers
+\definehighlight [luaident][\sffamily\itshape\restoreunderscore] %% lua identifiers
\definehighlight [identifier][\sffamily] %% names
\definehighlight [abbrev][\rmfamily\scshape] %% acronyms
\definehighlight [emphasis][\rmfamily\slshape] %% level 1 emph
@@ -206,6 +206,7 @@
%% one that we could map directly onto Latex’s \verb|…|.
\usepackage {listings}
+\usepackage {luatexbase}
\lstset {
basicstyle=\ttfamily,
}
@@ -220,9 +221,9 @@
\newcount \othercatcode \othercatcode 12
\newcount \activecatcode \othercatcode 13
-\newluatexcatcodetable \vrbcatcodes
-\setluatexcatcodetable \vrbcatcodes {%
- \luatexcatcodetable \CatcodeTableIniTeX
+\newcatcodetable \vrbcatcodes
+\setcatcodetable \vrbcatcodes {%
+ \catcodetable \CatcodeTableIniTeX
\catcode 9 \othercatcode %% \tabasciicode
\catcode 13 \othercatcode %% \endoflineasciicode
\catcode 12 \othercatcode %% \formfeedasciicode
@@ -230,48 +231,85 @@
\catcode 32 \othercatcode %% \spaceasciicode
}
-\newluatexcatcodetable \literalcatcodes
-\setluatexcatcodetable \literalcatcodes {%
- \luatexcatcodetable \CatcodeTableString
+\directlua {
+ document = document or { }
+ document.vrbcatcodesidx = tonumber (\the \vrbcatcodes)
+}
+
+\newcatcodetable \literalcatcodes
+\setcatcodetable \literalcatcodes {%
+ \catcodetable \CatcodeTableString
\catcode 32 \activecatcode %% \spaceasciicode
}
+\def \listingsurroundskip {\vskip \baselineskip}
+
\def \beginlisting {%
+ \noindent
\begingroup
- \luatexcatcodetable \vrbcatcodes
+ \catcodetable \vrbcatcodes
\beginlistingindeed%
}
\directlua {
- local texprint = tex.print
- local stringsub = string.sub
- local backslash = string.byte (0x5c)
- document = document or { }
+ local texsprint = tex.sprint
+ local stringis_empty = string.is_empty
+ local stringsub = string.sub
+ local stringgsub = string.gsub
+ %local backslash = unicode.utf8.char (0x200c)
+ local backslash = unicode.utf8.char (0x5c)
+ local escaped = [[\string\string\string\]]
document.printlines = function (buffer)
- for _, line in next, string.explode (buffer, "\noexpand\n") do
- if stringsub (line, 1, 1) == " " then
- line = backslash .. line
+ local lines = string.explode (buffer, "\noexpand\n")
+ print ""
+ for i, line in next, lines do
+ local line = stringgsub (line, backslash, escaped)
+ if stringis_empty (line) then
+ print (i, "listing: <empty line />")
+ texsprint [[\string\listingpar]]
+ else
+ local line = [[\string\beginlistingline]]
+ .. line
+ .. [[\string\endlistingline]]
+ .. [[\string\listingpar]]
+ print (i, "listing: «" .. line .. "»")
+ texsprint (document.vrbcatcodesidx, line)
end
- texprint (-1, line)
- texprint (-1, "")
end
end
}
+\def \listingpar {\endgraf}
+
+\let \endlistingline \relax
+\let \endlisting \relax
+
+\protected \def \beginlistingline{%
+ \leavevmode
+ \begingroup
+ \beginlistinglineindeed%
+}
+
+\def \beginlistinglineindeed #1\endlistingline{%
+ \endgroup
+ \hbox{%
+ \addfontfeature {RawFeature=-tlig;-liga}%% So one can’t just turn them all off at once using the ``Ligatures`` key?
+ \obeyspaces
+ #1}%
+}
+
\def \beginlistingindeed#1\endlisting{%
\endgroup
\begingroup
+ \endgraf
+ \listingsurroundskip
\ttfamily
\small
- \begin {quote}
- \bgroup
- \addfontfeature {RawFeature=-tlig;-liga}%% So one can’t just turn them all off at once using the ``Ligatures`` key?
- \luatexcatcodetable \literalcatcodes
- \obeyspaces
- \obeylines
- \directlua{document.printlines ([==[\detokenize {#1}]==])}
- \egroup
- \end {quote}
+ \parindent = 0em
+ \leftskip = 2em
+ \hangindent = 2em
+ \directlua{document.printlines ([==[\detokenize {#1}]==])}%
+ \listingsurroundskip
\endgroup
}
@@ -301,7 +339,7 @@
\definelist [descriptions]{\normalitem {\textbf \first}\hfill\break}
\definelist [definitions]{\normalitem {\fileent {\first}}}
\definelist [filelist]{\normalitem {\fileent {\first}}\space--\hskip 1em}
-\definelist [functionlist]{\normalitem {\luafunction {\first}}\hfill\break}
+\definelist [functionlist]{\normalitem {\luaident {\first}}\hfill\break}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% columns
diff --git a/Master/texmf-dist/source/luatex/luaotfload/luaotfload-main.tex b/Master/texmf-dist/source/luatex/luaotfload/luaotfload-main.tex
index 2e89ff000e8..967f5a2b346 100644
--- a/Master/texmf-dist/source/luatex/luaotfload/luaotfload-main.tex
+++ b/Master/texmf-dist/source/luatex/luaotfload/luaotfload-main.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2009-2014
+%% Copyright (C) 2009-2015
%%
%% by Elie Roux <elie.roux@telecom-bretagne.eu>
%% and Khaled Hosny <khaledhosny@eglug.org>
@@ -32,7 +32,7 @@
\beginfrontmatter
\setdocumenttitle {The \identifier{luaotfload} package}
- \setdocumentdate {2014/07/13 v2.5}
+ \setdocumentdate {2015/12/09 v2.6}
\setdocumentauthor {Elie Roux · Khaled Hosny · Philipp Gesang\\
Home: \hyperlink {https://github.com/lualatex/luaotfload}\\
Support: \email {lualatex-dev@tug.org}}
@@ -43,6 +43,11 @@
This package is an adaptation of the \CONTEXT font loading system.
It allows for loading \OpenType fonts with an extended syntax and adds
support for a variety of font features.
+
+ After discussion of the font loading API, this manual gives an
+ overview of the core components of \identifier{Luaotfload}: The
+ packaged font loader code, the names database, configuration, and
+ helper functions on the \LUA\ end.
\endabstractcontent
\endfrontmatter
@@ -233,7 +238,7 @@ where \meta{prefix} is either \inlinecode{file:} or \inlinecode {name:}.\footnot
needed, for instance when supplying a customized tex distribution.
The \inlinecode {my} lookup takes this a step further: it lets you define
- a custom resolver function and hook it into the \luafunction{resolve_font}
+ a custom resolver function and hook it into the \luaident{resolve_font}
callback.
%
This ensures full control over how a file is located.
@@ -606,7 +611,9 @@ obviously, \inlinecode{random}.
the file \inlinecode{tkrn.fea} that is part of \identifier{luaotfload}.
It can be read and applied as follows:
- \inlinecode{\\font \\test = Latin Modern Roman:featurefile=tkrn.fea;+tkrn}
+ \beginlisting
+ \font \test = Latin Modern Roman:featurefile=tkrn.fea;+tkrn
+ \endlisting
\endaltitem
\beginaltitem {color}
@@ -619,7 +626,7 @@ obviously, \inlinecode{random}.
For example, in order to set text in semitransparent red:
\beginlisting
-\font \test = "Latin Modern Roman:color=FF0000BB"
+ \font \test = "Latin Modern Roman:color=FF0000BB"
\endlisting
\endaltitem
@@ -658,13 +665,13 @@ obviously, \inlinecode{random}.
letterspacing width:
\beginlisting
-\font \iwonakernedA = "file:Iwona-Regular.otf:kernfactor=0.125"
-\font \iwonakernedB = "file:Iwona-Regular.otf:letterspace=12.5"
+ \font \iwonakernedA = "file:Iwona-Regular.otf:kernfactor=0.125"
+ \font \iwonakernedB = "file:Iwona-Regular.otf:letterspace=12.5"
\endlisting
Specific pairs of letters and ligatures may be exempt from
letterspacing by defining the \LUA functions
- \luafunction{keeptogether} and \luafunction{keepligature},
+ \luaident{keeptogether} and \luaident{keepligature},
respectively, inside the namespace \inlinecode {luaotfload.letterspace}.
%
Both functions are called whenever the letterspacing callback
@@ -673,10 +680,10 @@ obviously, \inlinecode{random}.
If they return a true-ish value, no extra kern is inserted at
the current position.
%
- \luafunction{keeptogether} receives a pair of consecutive
+ \luaident{keeptogether} receives a pair of consecutive
glyph nodes in order of their appearance in the node list.
%
- \luafunction{keepligature} receives a single node which can be
+ \luaident{keepligature} receives a single node which can be
analyzed into components.
%
(For details refer to the \emphasis{glyph nodes} section in the
@@ -686,7 +693,7 @@ obviously, \inlinecode{random}.
user.
\endaltitem
-\ifcontextmkiv
+\iffalse
\startbuffer [printvectors]
\directlua{inspect(fonts.protrusions.setups.default)
inspect(fonts.expansions.setups.default)}
@@ -706,7 +713,7 @@ obviously, \inlinecode{random}.
%
Alternatively and with loss of information, you can dump
those tables into your terminal by issuing
- \unless \ifcontextmkiv
+ \unless \iffalse
\beginlisting
\directlua{inspect(fonts.protrusions.setups.default)
inspect(fonts.expansions.setups.default)}
@@ -731,7 +738,7 @@ obviously, \inlinecode{random}.
}:
\beginlisting
-\font \test = LatinModernRoman:protrusion=default
+ \font \test = LatinModernRoman:protrusion=default
\endlisting
\endaltitem
\enddescriptions
@@ -763,7 +770,7 @@ Currently (2014) there are three of them:
remapping feature.
}:
- \unless \ifcontextmkiv
+ \unless \iffalse
%% Using braced arg syntax with inline code appears to be
%% impossible within Latex tables -- just ignore the weird
%% exclamation points below.
@@ -905,7 +912,7 @@ directories.
\tablefloat {table-searchpaths}
{List of paths searched for each supported operating system.}
{%
- \unless \ifcontextmkiv
+ \unless \iffalse
\begincentered
\begintabulate [lp{.5\textwidth}]
\beginrow
@@ -1056,27 +1063,28 @@ An example with explicit paths:
\endsection
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\beginsection {Files from \CONTEXT and \LUATEX-Fonts}
+\beginsection {The Fontloader}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\identifier{luaotfload} relies on code originally written by Hans
-Hagen for the \hyperlink[\identifier{\CONTEXT}]{http://wiki.contextgarden.net}
+\beginsubsection {Overview}
+
+To a large extent, \identifier{luaotfload} relies on code originally
+written by Hans Hagen for the
+\hyperlink[\identifier{\CONTEXT}]{http://wiki.contextgarden.net}
format.
%
-It integrates the font loader as distributed in
-the \identifier{\LUATEX-Fonts} package.
+It integrates the font loader, written entirely in \LUA, as distributed
+in the \identifier{\LUATEX-Fonts} package.
%
The original \LUA source files have been combined using the
-\fileent{mtx-package} script into a single, self-contained blob.
-In this form the font loader has no further dependencies\footnote{%
- It covers, however, to some extent the functionality of the
- \identifier{lualibs} package.
-}
-and requires only minor adaptions to integrate into
+\fileent{mtx-package} script into a single, self-contained blob. In
+this form the font loader depends only on the \identifier{lualibs}
+package and requires only minor adaptions to integrate into
\identifier{luaotfload}.
-%
-The guiding principle is to let \CONTEXT/\LUATEX-Fonts take care of
-the implementation, and update the imported code from time to time.
+
+The guiding principle is to let \CONTEXT/\LUATEX-Fonts take care of the
+implementation, and update the imported code as frequently as
+necessary.
%
As maintainers, we aim at importing files from upstream essentially
\emphasis{unmodified}, except for renaming them to prevent name
@@ -1087,19 +1095,24 @@ This job has been greatly alleviated since the advent of
manually spotted and extracted from the \CONTEXT source code in a
complicated and error-prone fashion.
+\endsubsection
+
+\beginsubsection {Contents and Dependencies}
+
Below is a commented list of the files distributed with
\identifier{luaotfload} in one way or the other.
%
See figure \ref{file-graph} on page \pageref{file-graph} for a
graphical representation of the dependencies.
%
-From \LUATEX-Fonts, only the file \fileent{luatex-fonts-merged.lua}
-has been imported as \fileent{luaotfload-fontloader.lua}.
-%
-It is generated by \fileent{mtx-package}, a \LUA source code merging
-too developed by Hans Hagen.\footnote{%
- \fileent{mtx-package} is
- \hyperlink [part of \CONTEXT]{http://repo.or.cz/w/context.git/blob_plain/refs/heads/origin:/scripts/context/lua/mtx-package.lua}
+\label{package}%
+Through the script \fileent{luaotfload-package.lua} the \CONTEXT
+utility \fileent{mtx-package} is invoked to create the
+\identifier{luaotfload} fontloader as a merged (amalgamated) source
+file.\footnote{%
+ \fileent{mtx-package}, a \LUA source code merging tool developed by
+ Hans Hagen, is
+ \hyperlink [part of \CONTEXT]{https://bitbucket.org/phg/context-mirror/src/726a663be481042003566d4614266b940b5a0c91/scripts/context/lua/mtx-package.lua?at=beta}
and requires \fileent{mtxrun}.
Run
\inlinecode {mtxrun --script package --help}
@@ -1107,31 +1120,56 @@ too developed by Hans Hagen.\footnote{%
For the actual merging code see the file
\fileent{util-mrg.lua} that is part of \CONTEXT.
}
-It houses several \LUA files that can be classed in three
-categories.
+%
+This file constitutes the “default fontloader” and is part of the
+\identifier{luaotfload} package as \fileent{fontloader-YY-MM-DD.lua},
+where the uppercase letters are placeholders for the build date.
+%
+A companion to it, \fileent{luatex-basics-gen.lua}, must be loaded
+beforehand to set up parts of the environment required by the \CONTEXT
+libraries.
+%
+During a \TEX\ run, the fontloader initialization and injection happen
+in the module \fileent{luaotfload-init.lua}.
+%
+Additionally, the “reference fontloader” as imported from \LUATEX-Fonts
+is provided as the file \fileent{fontloader-reference.lua}.
+%
+This file is self-contained in that it packages all the auxiliary \LUA
+libraries too, as Luaotfload did up to the 2.5 series; since that job
+has been offloaded to the \identifier{Lualibs} package, loading this
+fontloader introduces a certain code duplication.
+
+A number of \emphasis{\LUA utility libraries} are not part of the
+\identifier{luaotfload} fontloader, unlike its equivalent in
+\LUATEX-Fonts. These are already provided by the \identifier{lualibs}
+package and have thus been omitted from the merge.\footnote{%
+  Faithful listeners will remember the pre-2.6 era when the fontloader
+  used to be integrated as-is, which caused all kinds of code
+  duplication with the pervasive \identifier{lualibs} package.
+ This conceptual glitch has since been amended by tightening the
+ coupling with the excellent \CONTEXT\ toolchain.
+}
-\begindefinitions
- \beginnormalitem
- \emphasis{\LUA utility libraries}, a subset
- of what is provided by the \identifier{lualibs}
- package.
-
- \begindoublecolumns
- \begindefinitions
- \beginaltitem {l-lua.lua} \endaltitem
- \beginaltitem {l-lpeg.lua} \endaltitem
- \beginaltitem {l-function.lua} \endaltitem
- \beginaltitem {l-string.lua} \endaltitem
- \beginaltitem {l-table.lua} \endaltitem
- \beginaltitem {l-io.lua} \endaltitem
- \beginaltitem {l-file.lua} \endaltitem
- \beginaltitem {l-boolean.lua} \endaltitem
- \beginaltitem {l-math.lua} \endaltitem
- \beginaltitem {util-str.lua} \endaltitem
- \enddefinitions
- \enddoublecolumns
- \endnormalitem
+\begindoublecolumns
+ \begindefinitions
+ \beginaltitem {l-lua.lua} \endaltitem
+ \beginaltitem {l-lpeg.lua} \endaltitem
+ \beginaltitem {l-function.lua} \endaltitem
+ \beginaltitem {l-string.lua} \endaltitem
+ \beginaltitem {l-table.lua} \endaltitem
+ \beginaltitem {l-io.lua} \endaltitem
+ \beginaltitem {l-file.lua} \endaltitem
+ \beginaltitem {l-boolean.lua} \endaltitem
+ \beginaltitem {l-math.lua} \endaltitem
+ \beginaltitem {util-str.lua} \endaltitem
+ \enddefinitions
+\enddoublecolumns
+
+The reference fontloader is home to several \LUA files that fall into
+two groups, as listed below:
+\begindefinitions
\beginnormalitem
The \emphasis{font loader} itself.
These files have been written for
@@ -1139,7 +1177,6 @@ categories.
with \identifier{luaotfload}.
\begindoublecolumns
\begindefinitions
- \beginaltitem{luatex-basics-gen.lua} \endaltitem
\beginaltitem{luatex-basics-nod.lua} \endaltitem
\beginaltitem{luatex-fonts-enc.lua} \endaltitem
\beginaltitem{luatex-fonts-syn.lua} \endaltitem
@@ -1177,16 +1214,13 @@ categories.
\endnormalitem
\enddefinitions
-Note that if \identifier{luaotfload} cannot locate the
-merged file, it will load the individual \LUA libraries
-instead.
+As an alternative to the merged file, \identifier {Luaotfload} may load
+individual unpackaged \LUA libraries that come with the source, or even
+use the files from Context directly.
%
-Their names remain the same as in \CONTEXT (without the
-\inlinecode {otfl}-prefix) since we imported the relevant section of
-\fileent{luatex-fonts.lua} unmodified into \fileent{luaotfload-main.lua}.
-Thus if you prefer running bleeding edge code from the
-\CONTEXT beta, all you have to do is remove
-\fileent{luaotfload-merged.lua} from the search path.
+Thus if you prefer running bleeding edge code from the \CONTEXT beta,
+choose the \inlinecode {context} fontloader via the configuration file
+(see sections \ref{sec:conf} and \ref{sec:pkg} below).
Also, the merged file at some point loads the Adobe Glyph List from a
\LUA table that is contained in \fileent{luaotfload-glyphlist.lua},
@@ -1194,8 +1228,9 @@ which is automatically generated by the script
\fileent{mkglyphlist}.\footnote{%
See \fileent{luaotfload-font-enc.lua}.
The hard-coded file name is why we have to replace the procedure
- that loads the file in \fileent{luaotfload-override.lua}.
+ that loads the file in \fileent{luaotfload-init.lua}.
}
+%
There is a make target \identifier{glyphs} that will create a fresh
glyph list so we don’t need to import it from \CONTEXT any longer.
@@ -1209,7 +1244,10 @@ files not contained in the merge. Some of these have no equivalent in
font feature handling; incorporates some of the code from
\fileent{font-otc} from \CONTEXT;
\endaltitem
- \beginaltitem {luaotfload-override.lua}
+ \beginaltitem {luaotfload-configuration.lua}
+ handling of \fileent{luaotfload.conf(5)}.
+ \endaltitem
+ \beginaltitem {luaotfload-log.lua}
overrides the \CONTEXT logging functionality.
\endaltitem
\beginaltitem {luaotfload-loaders.lua}
@@ -1222,6 +1260,9 @@ files not contained in the merge. Some of these have no equivalent in
\beginaltitem {luaotfload-database.lua}
font names database.
\endaltitem
+ \beginaltitem {luaotfload-resolvers.lua}
+ file name resolvers.
+ \endaltitem
\beginaltitem {luaotfload-colors.lua}
color handling.
\endaltitem
@@ -1239,13 +1280,154 @@ files not contained in the merge. Some of these have no equivalent in
{Schematic of the files in \identifier{Luaotfload}}
{filegraph.pdf}
+\endsubsection
+
+\beginsubsection {Packaging}
+
+\label{sec:pkg}%
+The fontloader code is integrated as an isolated component that can be
+switched out on demand.
+%
+To specify the fontloader you wish to use, the configuration file
+(described in section \ref{sec:conf}) provides the option
+\inlinecode{fontloader}.
+%
+Its value can be one of the identifiers \inlinecode{default} or
+\inlinecode{reference} (see above, section \ref{package}), the name of
+a file somewhere in the search path of \LUATEX, or the identifier
+\inlinecode{context}.
+%
+The latter will make \identifier {Luaotfload} locate the \CONTEXT
+source by means of \identifier{kpathsea} lookups and use those files
+instead of the merged package.
+%
+The parameter may be extended with a path to the \CONTEXT
+\fileent{texmf}, separated with a colon:
+
+\beginlisting
+[run]
+ fontloader = context:~/context/tex/texmf-context
+\endlisting
+
+\noindent This setting allows accessing an installation -- e.~g. the
+standalone distribution or a source repository -- outside the current
+\TEX distribution.
+
+Like the \identifier{Lualibs} package, the fontloader is deployed as a
+\emphasis{merged package} containing a series of \LUA files joined
+together in their expected order and stripped of non-significant parts.
+%
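As an aside, the merged package produced this way wraps each constituent file in its own closure; the merged fontloader added elsewhere in this commit (fontloader-2015-12-09.lua) follows exactly this shape. A schematic Lua sketch, with a placeholder module name:

    do -- begin closure to overcome local limits and interference

    if not modules then modules = { } end modules ['font-xyz'] = {
      version = 1.001,
      comment = "placeholder for one constituent source file",
    }

    -- the body of the constituent file goes here; its locals remain
    -- confined to this closure

    end -- closure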
+The \fileent{mkimport} utility assists in pulling the files from a
+\CONTEXT tree and packaging them for use with \identifier{Luaotfload}.
+%
+The state of the files currently in \identifier{Luaotfload}’s
+repository can be queried:
+\beginlisting
+./scripts/mkimport news
+\endlisting
+%
+The subcommand for importing takes the prefix of the desired \CONTEXT
+\identifier{texmf} as an optional argument:
+\beginlisting
+./scripts/mkimport import ~/context/tex/texmf-context
+\endlisting
+%
+The packaging command, in turn, requires a path to the
+\emphasis{package description file} and the name of the output file.
+\beginlisting
+./scripts/mkimport package ./src/fontloader/luaotfload-package.lua fontloader-custom.lua
+\endlisting
+
+From the toplevel makefile, the targets \inlinecode{import} and
+\inlinecode{package} provide easy access to the commands as invoked during
+the \identifier{Luaotfload} build process.\footnote{%
+ \emphasis{Hint for those interested in the packaging process}: issue
+ \inlinecode{make show} for a list of available build routines.
+}
+These will call the \inlinecode{mkimport} script with the correct
+parameters to generate a datestamped package.
+%
+Whether files have been updated in the upstream distribution can be
+queried by \inlinecode{./scripts/mkimport news}.
+%
+This will compare the imported files with their counterparts in the
+\CONTEXT distribution and report changes.
+
+\endsubsection
+
\endsection
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\beginsection {Configuration Files}
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\beginnarrower
+ \emphasis{Caution}: For the authoritative documentation, consult the
+ manpage for \fileent{luaotfload.conf(5)}.
+\endnarrower
+
+\label{sec:conf}
+The runtime behavior of \identifier{Luaotfload} can be customized by
+means of a configuration file.
+% location
+At startup, it attempts to locate a file called \fileent
+{luaotfload.conf} or \fileent {luaotfloadrc} at a number of candidate
+locations:
+
+\begincentered
+ \begindefinitions
+ \beginnormalitem \fileent{./luaotfload.conf} \endnormalitem
+ \beginnormalitem \fileent{./luaotfloadrc} \endnormalitem
+ \beginnormalitem \fileent{\$XDG_CONFIG_HOME/luaotfload/luaotfload.conf} \endnormalitem
+ \beginnormalitem \fileent{\$XDG_CONFIG_HOME/luaotfload/luaotfload.rc} \endnormalitem
+ \beginnormalitem \fileent{~/.luaotfloadrc} \endnormalitem
+ \enddefinitions
+\endcentered
+
+\beginnarrower
+ \emphasis{Caution}: The configuration potentially modifies the final
+ document. A project-local file belongs under version control along
+ with the rest of the document. This is to ensure that everybody who
+ builds the project also receives the same customizations as the
+ author.
+\endnarrower
+
+% syntax
+The syntax is fairly close to the format used by
+\fileent{git-config(1)} which in turn was derived from the popular
+\identifier{.INI} format: Lines of key-value pairs are grouped under
+different configuration “sections”.\footnote{%
+  The configuration parser in \fileent {luaotfload-parsers.lua} might
+ be employed by other packages for similar purposes.
+}
+% example settings
+An example for customization via \fileent {luaotfload.conf} might look
+as below:
+
+\beginlisting
+; Example luaotfload.conf containing a rudimentary configuration
+[db]
+ update-live = false
+[run]
+ color-callback = pre_linebreak_filter
+ definer = info_patch
+ log-level = 5
+[default-features]
+ global = mode=base
+\endlisting
+
+This specifies that for the given project, \identifier{Luaotfload}
+shall not attempt to automatically scan for fonts if it can’t resolve a
+request. The font-based colorization will happen during \LUATEX’s
+pre-linebreak filter. The fontloader will output verbose information
+about the fonts at definition time along with globally increased
+verbosity. Lastly, the fontloader defaults to the less expensive
+\luaident{base} mode like it does in \CONTEXT.
+
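As an illustration of the format described above (bracketed sections, key = value pairs, comments introduced by a semicolon), a naive reader might look like the Lua sketch below. It is illustrative only and is not the parser shipped in luaotfload-parsers.lua.

    -- Sketch of a minimal reader for the INI-like configuration format.
    local function read_config (filename)
      local sections, current = { }, nil
      for line in io.lines (filename) do
        line = line:gsub (";.*$", ""):gsub ("^%s+", ""):gsub ("%s+$", "")
        if line ~= "" then
          local section = line:match ("^%[([%w_%-]+)%]$")
          if section then
            current = { }
            sections [section] = current
          elseif current then
            local key, value = line:match ("^([%w_%-]+)%s*=%s*(.+)$")
            if key then current [key] = value end
          end
        end
      end
      return sections
    end

    -- e.g. read_config ("luaotfload.conf") ["run"] ["color-callback"]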
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\beginsection {Auxiliary Functions}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-With release version 2.2, \identifier{luaotfload} received
+With release version 2.2, \identifier{Luaotfload} received
additional functions for package authors to call from outside
(see the file \fileent{luaotfload-auxiliary.lua} for details).
%
@@ -1254,7 +1436,7 @@ The purpose of this addition twofold.
Firstly, \identifier{luaotfload} failed to provide a stable interface
to internals in the past which resulted in an unmanageable situation
of different packages abusing the raw access to font objects by means
-of the \luafunction{patch_font} callback.
+of the \luaident{patch_font} callback.
%
When the structure of the font object changed due to an update, all
of these imploded and several packages had to be fixed while
@@ -1275,7 +1457,7 @@ additions.
\beginsubsection {Callback Functions}
-The \luafunction{patch_font} callback is inserted in the wrapper
+The \luaident{patch_font} callback is inserted in the wrapper
\identifier{luaotfload} provides for the font definition callback.
%
At this place it allows manipulating the font object immediately after
@@ -1367,8 +1549,8 @@ are defined for which scripts.
\beginfunctionlist
\beginaltitem {aux.font_has_glyph (id : int, index : int)}
- Predicate that returns true if the font \luafunction{id}
- has glyph \luafunction{index}.
+ Predicate that returns true if the font \luaident{id}
+ has glyph \luaident{index}.
\endaltitem
\beginaltitem {aux.slot_of_name(name : string)}
@@ -1377,33 +1559,33 @@ are defined for which scripts.
\endaltitem
\beginaltitem {aux.name_of_slot(slot : int)}
- The inverse of \luafunction{slot_of_name}; note that this
+ The inverse of \luaident{slot_of_name}; note that this
might be incomplete as multiple glyph names may map to the
same codepoint, only one of which is returned by
- \luafunction{name_of_slot}.
+ \luaident{name_of_slot}.
\endaltitem
\beginaltitem {aux.provides_script(id : int, script : string)}
- Test if a font supports \luafunction{script}.
+ Test if a font supports \luaident{script}.
\endaltitem
\beginaltitem {aux.provides_language(id : int, script : string, language : string)}
- Test if a font defines \luafunction{language} for a given
- \luafunction{script}.
+ Test if a font defines \luaident{language} for a given
+ \luaident{script}.
\endaltitem
\beginaltitem {aux.provides_feature(id : int, script : string,
language : string, feature : string)}
- Test if a font defines \luafunction{feature} for
- \luafunction{language} for a given \luafunction{script}.
+ Test if a font defines \luaident{feature} for
+ \luaident{language} for a given \luaident{script}.
\endaltitem
\beginaltitem {aux.get_math_dimension(id : int, dimension : string)}
- Get the dimension \luafunction{dimension} of font \luafunction{id}.
+ Get the dimension \luaident{dimension} of font \luaident{id}.
\endaltitem
\beginaltitem {aux.sprint_math_dimension(id : int, dimension : string)}
- Same as \luafunction{get_math_dimension()}, but output the value
+ Same as \luaident{get_math_dimension()}, but output the value
in scaled points at the \TEX end.
\endaltitem
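A short usage sketch for two of the queries above, with the signatures as documented in this manual; it assumes the functions are exposed in the luaotfload.aux namespace (set up by luaotfload-auxiliary.lua) and would normally run inside a \directlua call once a font has been selected:

    -- Sketch: query script/feature support of the current font.
    local aux = luaotfload.aux
    local id  = font.current ()
    if aux.provides_script (id, "latn")
      and aux.provides_feature (id, "latn", "dflt", "smcp") then
      texio.write_nl ("current font offers smcp for latn/dflt")
    end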
@@ -1416,7 +1598,7 @@ are defined for which scripts.
%% not implemented, may come back later
\beginfunctionlist
% \beginaltitem {aux.scan_external_dir(dir : string)}
-% Include fonts in directory \luafunction{dir} in font lookups without
+% Include fonts in directory \luaident{dir} in font lookups without
% adding them to the database.
%
\beginaltitem {aux.read_font_index (void)}
@@ -1527,9 +1709,9 @@ Another strategy that helps avoiding problems is to not access raw
Some of them, even though they are dangerous to access, have not been
overridden or disabled.
%
-Thus, whenever possible prefer the functions in the \luafunction{aux}
+Thus, whenever possible prefer the functions in the \luaident{aux}
namespace over direct manipulation of font objects. For example, raw
-access to the \luafunction{font.fonts} table like:
+access to the \luaident{font.fonts} table like:
\beginlisting
local somefont = font.fonts[2]
@@ -1537,16 +1719,16 @@ access to the \luafunction{font.fonts} table like:
\noindent can render already defined fonts unusable.
%
-Instead, the function \luafunction{font.getfont()} should be used
+Instead, the function \luaident{font.getfont()} should be used
because it has been replaced by a safe variant.
-However, \luafunction{font.getfont()} only covers fonts handled by the
+However, \luaident{font.getfont()} only covers fonts handled by the
font loader, e.~g. \identifier{OpenType} and \identifier{TrueType}
fonts, but not \abbrev{tfm} or \abbrev{ofm}.
%
Should you absolutely require access to all fonts known to \LUATEX,
including the virtual and autogenerated ones, then you need to query
-both \luafunction{font.getfont()} and \luafunction{font.fonts}.
+both \luaident{font.getfont()} and \luaident{font.fonts}.
%
In this case, best define you own accessor:
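Purely as an illustration of the advice above (and not necessarily the accessor the manual itself goes on to define), a combined lookup could be sketched as:

    -- Sketch: prefer the safe font.getfont(), fall back to the raw
    -- font.fonts table only for fonts the loader does not manage
    -- (tfm/ofm, virtual and autogenerated fonts).
    local function get_any_font (id)
      return font.getfont (id) or font.fonts [id]
    end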
diff --git a/Master/texmf-dist/source/luatex/luaotfload/luaotfload-tool.rst b/Master/texmf-dist/source/luatex/luaotfload/luaotfload-tool.rst
index 4b1a93456f3..4c05c44a812 100644
--- a/Master/texmf-dist/source/luatex/luaotfload/luaotfload-tool.rst
+++ b/Master/texmf-dist/source/luatex/luaotfload/luaotfload-tool.rst
@@ -6,9 +6,9 @@
generate and query the Luaotfload font names database
-----------------------------------------------------------------------
-:Date: 2014-03-30
+:Date: 2015-12-09
:Copyright: GPL v2.0
-:Version: 2.5
+:Version: 2.6
:Manual section: 1
:Manual group: text processing
@@ -42,6 +42,8 @@ SYNOPSIS
**luaotfload-tool** --diagnose=CHECK
+**luaotfload-tool** --conf=FILE --dumpconf
+
DESCRIPTION
=======================================================================
@@ -277,6 +279,13 @@ miscellaneous
commas, e.g. ``--diagnose=files,permissions``.
Specify ``thorough`` to run all checks.
+--conf=FILE Read the configuration from *FILE*. See
+ **luaotfload.conf**\(5) for documentation
+ concerning the format and available options.
+--dumpconf Print the currently active configuration; the
+ output can be saved to a file and used for
+ bootstrapping a custom configuration file.
+
FILES
=======================================================================
@@ -297,7 +306,7 @@ them with the next run of *LuaTeX*.
SEE ALSO
=======================================================================
-**luatex** (1), **lua** (1)
+**luaotfload.conf**\(5), **luatex**\(1), **lua**\(1)
* ``texdoc luaotfload`` to display the manual for the *Luaotfload*
package
diff --git a/Master/texmf-dist/source/luatex/luaotfload/luaotfload.conf.rst b/Master/texmf-dist/source/luatex/luaotfload/luaotfload.conf.rst
index 774095b5dec..d624e00f729 100644
--- a/Master/texmf-dist/source/luatex/luaotfload/luaotfload.conf.rst
+++ b/Master/texmf-dist/source/luatex/luaotfload/luaotfload.conf.rst
@@ -6,9 +6,9 @@
Luaotfload configuration file
-----------------------------------------------------------------------
-:Date: 2014-06-09
+:Date: 2015-12-09
:Copyright: GPL v2.0
-:Version: 2.5
+:Version: 2.6
:Manual section: 5
:Manual group: text processing
@@ -55,7 +55,14 @@ along with lots of other information.
To observe the difference in behavior, save above snippet to
``./luaotfload.conf`` and update the font index: ::
- luaotfload --update --force
+ luaotfload-tool --update --force
+
+The current configuration can be written to disk using
+**luaotfload-tool**: ::
+
+ luaotfload-tool --dumpconf > luaotfload.conf
+
+The result can itself be used as a configuration file.
SYNTAX
@@ -91,8 +98,9 @@ VARIABLES
Variables in belong into a configuration section and their values must
be of a certain type. Some of them have further constraints. For
example, the “color callback” must be a string of one of the values
-``pre_linebreak_filter`` or ``pre_output_filter``, defined in the
-section *run*.
+``post_linebreak_filter``, ``pre_linebreak_filter``, or
+``pre_output_filter``, defined in the section *run* of the
+configuration file.
Currently, the configuration is organized into four sections:
@@ -260,7 +268,7 @@ Section ``run``
+------------------+--------+------------------------------+
| variable | type | default |
+------------------+--------+------------------------------+
-| color-callback | s | ``"pre_linebreak_filter"`` |
+| color-callback | s | ``"post_linebreak_filter"`` |
+------------------+--------+------------------------------+
| definer | s | ``"patch"`` |
+------------------+--------+------------------------------+
@@ -268,13 +276,19 @@ Section ``run``
+------------------+--------+------------------------------+
| resolver | s | ``"cached"`` |
+------------------+--------+------------------------------+
+| fontloader | s | ``"default"`` |
++------------------+--------+------------------------------+
The ``color-callback`` option determines the stage at which fonts that
defined with a ``color=xxyyzz`` feature will be colorized. By default
-this happens in a ``pre_linebreak_filter`` but alternatively the
-``pre_output_filter`` may be chosen, which is faster but might produce
-inconsistent output. The latter also was the default in the 1.x series
-of Luaotfload.
+this happens in a ``post_linebreak_filter`` but alternatively the
+``pre_linebreak_filter`` or ``pre_output_filter`` may be chosen; the
+latter is faster but might produce inconsistent output. The
+``pre_output_filter`` used to be the default in the 1.x series of
+Luaotfload, whilst later versions up to and including 2.5 hooked into
+the ``pre_linebreak_filter``, which naturally didn’t affect any glyphs
+inserted during hyphenation. Both are kept around as options to
+restore the previous behavior if necessary.
The ``definer`` allows for switching the ``define_font`` callback.
Apart from the default ``patch`` one may also choose the ``generic``
@@ -282,6 +296,31 @@ one that comes with the vanilla fontloader. Beware that this might
break tools like Fontspect that rely on the ``patch_font`` callback
provided by Luaotfload to perform important corrections on font data.
+The fontloader backend can be selected by setting the value of
+``fontloader``. The most important choices are ``default``, which will
+load the dedicated Luaotfload fontloader, and ``reference``, the
+upstream package as shipped with Luaotfload. Other than those, a file
+name accessible via kpathsea can be specified.
+
+Alternatively, the individual files that constitute the fontloader can
+be loaded directly. While less efficient, this greatly aids debugging
+since error messages will reference the actual line numbers of the
+source files and explanatory comments are not stripped. Currently,
+three distinct loading strategies are available: ``unpackaged`` will
+load the batch of files that is part of Luaotfload; these contain the
+identical source code that the reference fontloader has been compiled
+from. Another option, ``context``, will attempt to load the same files
+by their names in the Context format from the search path.
+Consequently this option allows using the version of Context that
+comes with the TeX distribution. Distros tend to prefer the stable
+version (“current” in Context jargon) of those files, so certain bugs
+encountered in the more bleeding-edge Luaotfload can be avoided this
+way. A third option is to use ``context`` with a colon to specify the
+directory prefix of the *TEXMF* tree that the files should be loaded
+from, e.g. ``context:~/context/tex/texmf-context``. This can be used
+when referencing another distribution like the Context minimals that
+is installed under a different path not indexed by kpathsea.
+
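For instance, to opt into the unpackaged files described above, the run section of luaotfload.conf could contain (example only):

    [run]
      fontloader = unpackaged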
The value of ``log-level`` sets the default verbosity of messages
printed by Luaotfload. Only messages defined with a verbosity of less
than or equal to the supplied value will be output on the terminal.
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-2015-12-09.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-2015-12-09.lua
new file mode 100644
index 00000000000..f4181c686c4
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-2015-12-09.lua
@@ -0,0 +1,11552 @@
+-- merged file : /home/phg/src/latex-dev/luaotfload/src/fontloader/luaotfload-package-merged.lua
+-- parent file : /home/phg/src/latex-dev/luaotfload/src/fontloader/luaotfload-package.lua
+-- merge date : Wed Dec 9 23:19:38 2015
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
+ end
+ return s
+ end
+end
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
+end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
+end
+function containers.read(container,name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
+end
+function containers.content(container,name)
+ return container.storage[name]
+end
+function containers.cleanname(name)
+ return (gsub(lower(name),"[^%w\128-\255]+","-"))
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-nod']={
+ version=1.001,
+ comment="companion to luatex-fonts.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+if tex.attribute[0]~=0 then
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+ tex.attribute[0]=0
+end
+attributes=attributes or {}
+attributes.unsetvalue=-0x7FFFFFFF
+local numbers,last={},127
+attributes.private=attributes.private or function(name)
+ local number=numbers[name]
+ if not number then
+ if last<255 then
+ last=last+1
+ end
+ number=last
+ numbers[name]=number
+ end
+ return number
+end
+nodes={}
+nodes.pool={}
+nodes.handlers={}
+local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
+local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
+local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" }
+nodes.nodecodes=nodecodes
+nodes.whatcodes=whatcodes
+nodes.whatsitcodes=whatcodes
+nodes.glyphcodes=glyphcodes
+nodes.disccodes=disccodes
+local free_node=node.free
+local remove_node=node.remove
+local new_node=node.new
+local traverse_id=node.traverse_id
+nodes.handlers.protectglyphs=node.protect_glyphs
+nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+local math_code=nodecodes.math
+local end_of_math=node.end_of_math
+function node.end_of_math(n)
+ if n.id==math_code and n.subtype==1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+function nodes.remove(head,current,free_too)
+ local t=current
+ head,current=remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t=nil
+ else
+ t.next,t.prev=nil,nil
+ end
+ end
+ return head,current,t
+end
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+function nodes.pool.kern(k)
+ local n=new_node("kern",1)
+ n.kern=k
+ return n
+end
+local getfield=node.getfield
+local setfield=node.setfield
+nodes.getfield=getfield
+nodes.setfield=setfield
+nodes.getattr=getfield
+nodes.setattr=setfield
+nodes.tostring=node.tostring or tostring
+nodes.copy=node.copy
+nodes.copy_list=node.copy_list
+nodes.delete=node.delete
+nodes.dimensions=node.dimensions
+nodes.end_of_math=node.end_of_math
+nodes.flush_list=node.flush_list
+nodes.flush_node=node.flush_node
+nodes.free=node.free
+nodes.insert_after=node.insert_after
+nodes.insert_before=node.insert_before
+nodes.hpack=node.hpack
+nodes.new=node.new
+nodes.tail=node.tail
+nodes.traverse=node.traverse
+nodes.traverse_id=node.traverse_id
+nodes.slide=node.slide
+nodes.vpack=node.vpack
+nodes.first_glyph=node.first_glyph
+nodes.first_character=node.first_character
+nodes.has_glyph=node.has_glyph or node.first_glyph
+nodes.current_attr=node.current_attr
+nodes.do_ligature_n=node.do_ligature_n
+nodes.has_field=node.has_field
+nodes.last_node=node.last_node
+nodes.usedlist=node.usedlist
+nodes.protrusion_skippable=node.protrusion_skippable
+nodes.write=node.write
+nodes.has_attribute=node.has_attribute
+nodes.set_attribute=node.set_attribute
+nodes.unset_attribute=node.unset_attribute
+nodes.protect_glyphs=node.protect_glyphs
+nodes.unprotect_glyphs=node.unprotect_glyphs
+nodes.mlist_to_hlist=node.mlist_to_hlist
+local direct=node.direct
+local nuts={}
+nodes.nuts=nuts
+local tonode=direct.tonode
+local tonut=direct.todirect
+nodes.tonode=tonode
+nodes.tonut=tonut
+nuts.tonode=tonode
+nuts.tonut=tonut
+local getfield=direct.getfield
+local setfield=direct.setfield
+nuts.getfield=getfield
+nuts.setfield=setfield
+nuts.getnext=direct.getnext
+nuts.getprev=direct.getprev
+nuts.getid=direct.getid
+nuts.getattr=getfield
+nuts.setattr=setfield
+nuts.getfont=direct.getfont
+nuts.getsubtype=direct.getsubtype
+nuts.getchar=direct.getchar
+nuts.insert_before=direct.insert_before
+nuts.insert_after=direct.insert_after
+nuts.delete=direct.delete
+nuts.copy=direct.copy
+nuts.copy_list=direct.copy_list
+nuts.tail=direct.tail
+nuts.flush_list=direct.flush_list
+nuts.free=direct.free
+nuts.remove=direct.remove
+nuts.is_node=direct.is_node
+nuts.end_of_math=direct.end_of_math
+nuts.traverse=direct.traverse
+nuts.traverse_id=direct.traverse_id
+nuts.getprop=nuts.getattr
+nuts.setprop=nuts.setattr
+local new_nut=direct.new
+nuts.new=new_nut
+nuts.pool={}
+function nuts.pool.kern(k)
+ local n=new_nut("kern",1)
+ setfield(n,"kern",k)
+ return n
+end
+local propertydata=direct.get_properties_table()
+nodes.properties={ data=propertydata }
+direct.set_properties_mode(true,true)
+function direct.set_properties_mode() end
+nuts.getprop=function(n,k)
+ local p=propertydata[n]
+ if p then
+ return p[k]
+ end
+end
+nuts.setprop=function(n,k,v)
+ if v then
+ local p=propertydata[n]
+ if p then
+ p[k]=v
+ else
+ propertydata[n]={ [k]=v }
+ end
+ end
+end
+nodes.setprop=nodes.setproperty
+nodes.getprop=nodes.getproperty
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ini']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local report_defining=logs.reporter("fonts","defining")
+fonts=fonts or {}
+local fonts=fonts
+fonts.hashes={ identifiers=allocate() }
+fonts.tables=fonts.tables or {}
+fonts.helpers=fonts.helpers or {}
+fonts.tracers=fonts.tracers or {}
+fonts.specifiers=fonts.specifiers or {}
+fonts.analyzers={}
+fonts.readers={}
+fonts.definers={ methods={} }
+fonts.loggers={ register=function() end }
+fontloader.totable=fontloader.to_table
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-con']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,tostring,rawget=next,tostring,rawget
+local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub
+local utfbyte=utf.byte
+local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy
+local derivetable=table.derive
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end)
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local constructors=fonts.constructors or {}
+fonts.constructors=constructors
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+constructors.dontembed=allocate()
+constructors.autocleanup=true
+constructors.namemode="fullpath"
+constructors.version=1.01
+constructors.cache=containers.define("fonts","constructors",constructors.version,false)
+constructors.privateoffset=0xF0000
+constructors.cacheintex=true
+constructors.keys={
+ properties={
+ encodingbytes="number",
+ embedding="number",
+ cidinfo={},
+ format="string",
+ fontname="string",
+ fullname="string",
+ filename="filename",
+ psname="string",
+ name="string",
+ virtualized="boolean",
+ hasitalics="boolean",
+ autoitalicamount="basepoints",
+ nostackmath="boolean",
+ noglyphnames="boolean",
+ mode="string",
+ hasmath="boolean",
+ mathitalics="boolean",
+ textitalics="boolean",
+ finalized="boolean",
+ },
+ parameters={
+ mathsize="number",
+ scriptpercentage="float",
+ scriptscriptpercentage="float",
+ units="cardinal",
+ designsize="scaledpoints",
+ expansion={
+ stretch="integerscale",
+ shrink="integerscale",
+ step="integerscale",
+ auto="boolean",
+ },
+ protrusion={
+ auto="boolean",
+ },
+ slantfactor="float",
+ extendfactor="float",
+ factor="float",
+ hfactor="float",
+ vfactor="float",
+ size="scaledpoints",
+ units="scaledpoints",
+ scaledpoints="scaledpoints",
+ slantperpoint="scaledpoints",
+ spacing={
+ width="scaledpoints",
+ stretch="scaledpoints",
+ shrink="scaledpoints",
+ extra="scaledpoints",
+ },
+ xheight="scaledpoints",
+ quad="scaledpoints",
+ ascender="scaledpoints",
+ descender="scaledpoints",
+ synonyms={
+ space="spacing.width",
+ spacestretch="spacing.stretch",
+ spaceshrink="spacing.shrink",
+ extraspace="spacing.extra",
+ x_height="xheight",
+ space_stretch="spacing.stretch",
+ space_shrink="spacing.shrink",
+ extra_space="spacing.extra",
+ em="quad",
+ ex="xheight",
+ slant="slantperpoint",
+ },
+ },
+ description={
+ width="basepoints",
+ height="basepoints",
+ depth="basepoints",
+ boundingbox={},
+ },
+ character={
+ width="scaledpoints",
+ height="scaledpoints",
+ depth="scaledpoints",
+ italic="scaledpoints",
+ },
+}
+local designsizes=allocate()
+constructors.designsizes=designsizes
+local loadedfonts=allocate()
+constructors.loadedfonts=loadedfonts
+local factors={
+ pt=65536.0,
+ bp=65781.8,
+}
+function constructors.setfactor(f)
+ constructors.factor=factors[f or 'pt'] or factors.pt
+end
+constructors.setfactor()
+function constructors.scaled(scaledpoints,designsize)
+ if scaledpoints<0 then
+ if designsize then
+ local factor=constructors.factor
+ if designsize>factor then
+ return (- scaledpoints/1000)*designsize
+ else
+ return (- scaledpoints/1000)*designsize*factor
+ end
+ else
+ return (- scaledpoints/1000)*10*factor
+ end
+ else
+ return scaledpoints
+ end
+end
+function constructors.cleanuptable(tfmdata)
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k,v in next,tfmdata.characters do
+ if v.commands then v.commands=nil end
+ end
+ end
+end
+function constructors.calculatescale(tfmdata,scaledpoints)
+ local parameters=tfmdata.parameters
+ if scaledpoints<0 then
+ scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize)
+ end
+ return scaledpoints,scaledpoints/(parameters.units or 1000)
+end
+local unscaled={
+ ScriptPercentScaleDown=true,
+ ScriptScriptPercentScaleDown=true,
+ RadicalDegreeBottomRaisePercent=true
+}
+function constructors.assignmathparameters(target,original)
+ local mathparameters=original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters=target.parameters
+ local targetproperties=target.properties
+ local targetmathparameters={}
+ local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name,value in next,mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name]=value
+ else
+ targetmathparameters[name]=value*factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size
+ end
+ target.mathparameters=targetmathparameters
+ end
+end
+function constructors.beforecopyingcharacters(target,original)
+end
+function constructors.aftercopyingcharacters(target,original)
+end
+constructors.sharefonts=false
+constructors.nofsharedfonts=0
+local sharednames={}
+function constructors.trytosharefont(target,tfmdata)
+ if constructors.sharefonts then
+ local characters=target.characters
+ local n=1
+ local t={ target.psname }
+ local u=sortedkeys(characters)
+ for i=1,#u do
+ local k=u[i]
+ n=n+1;t[n]=k
+ n=n+1;t[n]=characters[k].index or k
+ end
+ local h=md5.HEX(concat(t," "))
+ local s=sharednames[h]
+ if s then
+ if trace_defining then
+ report_defining("font %a uses backend resources of font %a",target.fullname,s)
+ end
+ target.fullname=s
+ constructors.nofsharedfonts=constructors.nofsharedfonts+1
+ target.properties.sharedwith=s
+ else
+ sharednames[h]=target.fullname
+ end
+ end
+end
+function constructors.enhanceparameters(parameters)
+ local xheight=parameters.x_height
+ local quad=parameters.quad
+ local space=parameters.space
+ local stretch=parameters.space_stretch
+ local shrink=parameters.space_shrink
+ local extra=parameters.extra_space
+ local slant=parameters.slant
+ parameters.xheight=xheight
+ parameters.spacestretch=stretch
+ parameters.spaceshrink=shrink
+ parameters.extraspace=extra
+ parameters.em=quad
+ parameters.ex=xheight
+ parameters.slantperpoint=slant
+ parameters.spacing={
+ width=space,
+ stretch=stretch,
+ shrink=shrink,
+ extra=extra,
+ }
+end
+function constructors.scale(tfmdata,specification)
+ local target={}
+ if tonumber(specification) then
+ specification={ size=specification }
+ end
+ target.specification=specification
+ local scaledpoints=specification.size
+ local relativeid=specification.relativeid
+ local properties=tfmdata.properties or {}
+ local goodies=tfmdata.goodies or {}
+ local resources=tfmdata.resources or {}
+ local descriptions=tfmdata.descriptions or {}
+ local characters=tfmdata.characters or {}
+ local changed=tfmdata.changed or {}
+ local shared=tfmdata.shared or {}
+ local parameters=tfmdata.parameters or {}
+ local mathparameters=tfmdata.mathparameters or {}
+ local targetcharacters={}
+ local targetdescriptions=derivetable(descriptions)
+ local targetparameters=derivetable(parameters)
+ local targetproperties=derivetable(properties)
+ local targetgoodies=goodies
+ target.characters=targetcharacters
+ target.descriptions=targetdescriptions
+ target.parameters=targetparameters
+ target.properties=targetproperties
+ target.goodies=targetgoodies
+ target.shared=shared
+ target.resources=resources
+ target.unscaled=tfmdata
+ local mathsize=tonumber(specification.mathsize) or 0
+ local textsize=tonumber(specification.textsize) or scaledpoints
+ local forcedsize=tonumber(parameters.mathsize ) or 0
+ local extrafactor=tonumber(specification.factor ) or 1
+ if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
+ scaledpoints=parameters.scriptpercentage*textsize/100
+ elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then
+ scaledpoints=parameters.scriptscriptpercentage*textsize/100
+ elseif forcedsize>1000 then
+ scaledpoints=forcedsize
+ end
+ targetparameters.mathsize=mathsize
+ targetparameters.textsize=textsize
+ targetparameters.forcedsize=forcedsize
+ targetparameters.extrafactor=extrafactor
+ local tounicode=fonts.mappings.tounicode
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local units=parameters.units or 1000
+ if target.fonts then
+ target.fonts=fastcopy(target.fonts)
+ end
+ targetproperties.language=properties.language or "dflt"
+ targetproperties.script=properties.script or "dflt"
+ targetproperties.mode=properties.mode or "base"
+ local askedscaledpoints=scaledpoints
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification)
+ local hdelta=delta
+ local vdelta=delta
+ target.designsize=parameters.designsize
+ target.units_per_em=units
+ local direction=properties.direction or tfmdata.direction or 0
+ target.direction=direction
+ properties.direction=direction
+ target.size=scaledpoints
+ target.encodingbytes=properties.encodingbytes or 1
+ target.embedding=properties.embedding or "subset"
+ target.tounicode=1
+ target.cidinfo=properties.cidinfo
+ target.format=properties.format
+ target.cache=constructors.cacheintex and "yes" or "renew"
+ local fontname=properties.fontname or tfmdata.fontname
+ local fullname=properties.fullname or tfmdata.fullname
+ local filename=properties.filename or tfmdata.filename
+ local psname=properties.psname or tfmdata.psname
+ local name=properties.name or tfmdata.name
+ if not psname or psname=="" then
+ psname=fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname=fontname
+ target.fullname=fullname
+ target.filename=filename
+ target.psname=psname
+ target.name=name
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.filename=filename
+ properties.psname=psname
+ properties.name=name
+ local expansion=parameters.expansion
+ if expansion then
+ target.stretch=expansion.stretch
+ target.shrink=expansion.shrink
+ target.step=expansion.step
+ target.auto_expand=expansion.auto
+ end
+ local protrusion=parameters.protrusion
+ if protrusion then
+ target.auto_protrude=protrusion.auto
+ end
+ local extendfactor=parameters.extendfactor or 0
+ if extendfactor~=0 and extendfactor~=1 then
+ hdelta=hdelta*extendfactor
+ target.extend=extendfactor*1000
+ else
+ target.extend=1000
+ end
+ local slantfactor=parameters.slantfactor or 0
+ if slantfactor~=0 then
+ target.slant=slantfactor*1000
+ else
+ target.slant=0
+ end
+ targetparameters.factor=delta
+ targetparameters.hfactor=hdelta
+ targetparameters.vfactor=vdelta
+ targetparameters.size=scaledpoints
+ targetparameters.units=units
+ targetparameters.scaledpoints=askedscaledpoints
+ local isvirtual=properties.virtualized or tfmdata.type=="virtual"
+ local hasquality=target.auto_expand or target.auto_protrude
+ local hasitalics=properties.hasitalics
+ local autoitalicamount=properties.autoitalicamount
+ local stackmath=not properties.nostackmath
+ local nonames=properties.noglyphnames
+ local haskerns=properties.haskerns or properties.mode=="base"
+ local hasligatures=properties.hasligatures or properties.mode=="base"
+ local realdimensions=properties.realdimensions
+ if changed and not next(changed) then
+ changed=false
+ end
+ target.type=isvirtual and "virtual" or "real"
+ target.postprocessors=tfmdata.postprocessors
+ local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt
+ local targetspace=(parameters.space or parameters[2] or 0)*hdelta
+ local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
+ local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
+ local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta
+ local targetquad=(parameters.quad or parameters[6] or 0)*hdelta
+ local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta
+ targetparameters.slant=targetslant
+ targetparameters.space=targetspace
+ targetparameters.space_stretch=targetspace_stretch
+ targetparameters.space_shrink=targetspace_shrink
+ targetparameters.x_height=targetx_height
+ targetparameters.quad=targetquad
+ targetparameters.extra_space=targetextra_space
+ local ascender=parameters.ascender
+ if ascender then
+ targetparameters.ascender=delta*ascender
+ end
+ local descender=parameters.descender
+ if descender then
+ targetparameters.descender=delta*descender
+ end
+ constructors.enhanceparameters(targetparameters)
+ local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0
+ local scaledwidth=defaultwidth*hdelta
+ local scaledheight=defaultheight*vdelta
+ local scaleddepth=defaultdepth*vdelta
+ local hasmath=(properties.hasmath or next(mathparameters)) and true
+ if hasmath then
+ constructors.assignmathparameters(target,tfmdata)
+ properties.hasmath=true
+ target.nomath=false
+ target.MathConstants=target.mathparameters
+ else
+ properties.hasmath=false
+ target.nomath=true
+ target.mathparameters=nil
+ end
+ local italickey="italic"
+ local useitalics=true
+ if hasmath then
+ autoitalicamount=false
+ elseif properties.textitalics then
+ italickey="italic_correction"
+ useitalics=false
+ if properties.delaytextitalics then
+ autoitalicamount=false
+ end
+ end
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,
+ hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled")
+ end
+ constructors.beforecopyingcharacters(target,tfmdata)
+ local sharedkerns={}
+ for unicode,character in next,characters do
+ local chr,description,index
+ if changed then
+ local c=changed[unicode]
+ if c then
+ description=descriptions[c] or descriptions[unicode] or character
+ character=characters[c] or character
+ index=description.index or c
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ end
+ local width=description.width
+ local height=description.height
+ local depth=description.depth
+ if realdimensions then
+ if not height or height==0 then
+ local bb=description.boundingbox
+ local ht=bb[4]
+ if ht~=0 then
+ height=ht
+ end
+ if not depth or depth==0 then
+ local dp=-bb[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ elseif not depth or depth==0 then
+ local dp=-description.boundingbox[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ end
+ if width then width=hdelta*width else width=scaledwidth end
+ if height then height=vdelta*height else height=scaledheight end
+ if depth and depth~=0 then
+ depth=delta*depth
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ end
+ else
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ width=width,
+ }
+ end
+ end
+ local isunicode=description.unicode
+ if isunicode then
+ chr.unicode=isunicode
+ chr.tounicode=tounicode(isunicode)
+ end
+ if hasquality then
+ local ve=character.expansion_factor
+ if ve then
+ chr.expansion_factor=ve*1000
+ end
+ local vl=character.left_protruding
+ if vl then
+ chr.left_protruding=protrusionfactor*width*vl
+ end
+ local vr=character.right_protruding
+ if vr then
+ chr.right_protruding=protrusionfactor*width*vr
+ end
+ end
+ if autoitalicamount then
+ local vi=description.italic
+ if not vi then
+ local vi=description.boundingbox[3]-description.width+autoitalicamount
+ if vi>0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif hasitalics then
+ local vi=description.italic
+ if vi and vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ end
+ if hasmath then
+ local vn=character.next
+ if vn then
+ chr.next=vn
+ else
+ local vv=character.vert_variants
+ if vv then
+ local t={}
+ for i=1,#vv do
+ local vvi=vv[i]
+ t[i]={
+ ["start"]=(vvi["start"] or 0)*vdelta,
+ ["end"]=(vvi["end"] or 0)*vdelta,
+ ["advance"]=(vvi["advance"] or 0)*vdelta,
+ ["extender"]=vvi["extender"],
+ ["glyph"]=vvi["glyph"],
+ }
+ end
+ chr.vert_variants=t
+ else
+ local hv=character.horiz_variants
+ if hv then
+ local t={}
+ for i=1,#hv do
+ local hvi=hv[i]
+ t[i]={
+ ["start"]=(hvi["start"] or 0)*hdelta,
+ ["end"]=(hvi["end"] or 0)*hdelta,
+ ["advance"]=(hvi["advance"] or 0)*hdelta,
+ ["extender"]=hvi["extender"],
+ ["glyph"]=hvi["glyph"],
+ }
+ end
+ chr.horiz_variants=t
+ end
+ end
+ end
+ local va=character.top_accent
+ if va then
+ chr.top_accent=vdelta*va
+ end
+ if stackmath then
+ local mk=character.mathkerns
+ if mk then
+ local kerns={}
+ local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_right=k end
+ local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_left=k end
+ local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_left=k end
+ local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_right=k end
+ chr.mathkern=kerns
+ end
+ end
+ end
+ if haskerns then
+ local vk=character.kerns
+ if vk then
+ local s=sharedkerns[vk]
+ if not s then
+ s={}
+ for k,v in next,vk do s[k]=v*hdelta end
+ sharedkerns[vk]=s
+ end
+ chr.kerns=s
+ end
+ end
+ if hasligatures then
+ local vl=character.ligatures
+ if vl then
+ if true then
+ chr.ligatures=vl
+ else
+ local tt={}
+ for i,l in next,vl do
+ tt[i]=l
+ end
+ chr.ligatures=tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc=character.commands
+ if vc then
+ local ok=false
+ for i=1,#vc do
+ local key=vc[i][1]
+ if key=="right" or key=="down" then
+ ok=true
+ break
+ end
+ end
+ if ok then
+ local tt={}
+ for i=1,#vc do
+ local ivc=vc[i]
+ local key=ivc[1]
+ if key=="right" then
+ tt[i]={ key,ivc[2]*hdelta }
+ elseif key=="down" then
+ tt[i]={ key,ivc[2]*vdelta }
+ elseif key=="rule" then
+ tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta }
+ else
+ tt[i]=ivc
+ end
+ end
+ chr.commands=tt
+ else
+ chr.commands=vc
+ end
+ chr.index=nil
+ end
+ end
+ targetcharacters[unicode]=chr
+ end
+ constructors.aftercopyingcharacters(target,tfmdata)
+ constructors.trytosharefont(target,tfmdata)
+ return target
+end
+function constructors.finalize(tfmdata)
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ if not tfmdata.characters then
+ return nil
+ end
+ if not tfmdata.goodies then
+ tfmdata.goodies={}
+ end
+ local parameters=tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ if not parameters.expansion then
+ parameters.expansion={
+ stretch=tfmdata.stretch or 0,
+ shrink=tfmdata.shrink or 0,
+ step=tfmdata.step or 0,
+ auto=tfmdata.auto_expand or false,
+ }
+ end
+ if not parameters.protrusion then
+ parameters.protrusion={
+   auto=tfmdata.auto_protrude or false
+ }
+ end
+ if not parameters.size then
+ parameters.size=tfmdata.size
+ end
+ if not parameters.extendfactor then
+ parameters.extendfactor=tfmdata.extend or 0
+ end
+ if not parameters.slantfactor then
+ parameters.slantfactor=tfmdata.slant or 0
+ end
+ if not parameters.designsize then
+ parameters.designsize=tfmdata.designsize or (factors.pt*10)
+ end
+ if not parameters.units then
+ parameters.units=tfmdata.units_per_em or 1000
+ end
+ if not tfmdata.descriptions then
+ local descriptions={}
+ setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end)
+ tfmdata.descriptions=descriptions
+ end
+ local properties=tfmdata.properties
+ if not properties then
+ properties={}
+ tfmdata.properties=properties
+ end
+ if not properties.virtualized then
+ properties.virtualized=tfmdata.type=="virtual"
+ end
+ if not tfmdata.properties then
+ tfmdata.properties={
+ fontname=tfmdata.fontname,
+ filename=tfmdata.filename,
+ fullname=tfmdata.fullname,
+ name=tfmdata.name,
+ psname=tfmdata.psname,
+ encodingbytes=tfmdata.encodingbytes or 1,
+ embedding=tfmdata.embedding or "subset",
+ tounicode=tfmdata.tounicode or 1,
+ cidinfo=tfmdata.cidinfo or nil,
+ format=tfmdata.format or "type1",
+ direction=tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources={}
+ end
+ if not tfmdata.shared then
+ tfmdata.shared={}
+ end
+ if not properties.hasmath then
+ properties.hasmath=not tfmdata.nomath
+ end
+ tfmdata.MathConstants=nil
+ tfmdata.postprocessors=nil
+ tfmdata.fontname=nil
+ tfmdata.filename=nil
+ tfmdata.fullname=nil
+ tfmdata.name=nil
+ tfmdata.psname=nil
+ tfmdata.encodingbytes=nil
+ tfmdata.embedding=nil
+ tfmdata.tounicode=nil
+ tfmdata.cidinfo=nil
+ tfmdata.format=nil
+ tfmdata.direction=nil
+ tfmdata.type=nil
+ tfmdata.nomath=nil
+ tfmdata.designsize=nil
+ tfmdata.size=nil
+ tfmdata.stretch=nil
+ tfmdata.shrink=nil
+ tfmdata.step=nil
+ tfmdata.auto_expand=nil
+ tfmdata.auto_protrude=nil
+ tfmdata.extend=nil
+ tfmdata.slant=nil
+ tfmdata.units_per_em=nil
+ tfmdata.cache=nil
+ properties.finalized=true
+ return tfmdata
+end
+local hashmethods={}
+constructors.hashmethods=hashmethods
+function constructors.hashfeatures(specification)
+ local features=specification.features
+ if features then
+ local t,tn={},0
+ for category,list in next,features do
+ if next(list) then
+ local hasher=hashmethods[category]
+ if hasher then
+ local hash=hasher(list)
+ if hash then
+ tn=tn+1
+ t[tn]=category..":"..hash
+ end
+ end
+ end
+ end
+ if tn>0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+hashmethods.normal=function(list)
+ local s={}
+ local n=0
+ for k,v in next,list do
+ if not k then
+ elseif k=="number" or k=="features" then
+ else
+ n=n+1
+ s[n]=k
+ end
+ end
+ if n>0 then
+ sort(s)
+ for i=1,n do
+ local k=s[i]
+ s[i]=k..'='..tostring(list[k])
+ end
+ return concat(s,"+")
+ end
+end
+function constructors.hashinstance(specification,force)
+ local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks
+ if force or not hash then
+ hash=constructors.hashfeatures(specification)
+ specification.hash=hash
+ end
+ if size<1000 and designsizes[hash] then
+ size=math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size=size
+ end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
+ end
+end
+function constructors.setname(tfmdata,specification)
+ if constructors.namemode=="specification" then
+ local specname=specification.specification
+ if specname then
+ tfmdata.properties.name=specname
+ if trace_defining then
+ report_otf("overloaded fontname %a",specname)
+ end
+ end
+ end
+end
+function constructors.checkedfilename(data)
+ local foundfilename=data.foundfilename
+ if not foundfilename then
+ local askedfilename=data.filename or ""
+ if askedfilename~="" then
+ askedfilename=resolvers.resolve(askedfilename)
+ foundfilename=resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename=="" then
+ report_defining("source file %a is not found",askedfilename)
+ foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename~="" then
+ report_defining("using source file %a due to cache mismatch",foundfilename)
+ end
+ end
+ end
+ data.foundfilename=foundfilename
+ end
+ return foundfilename
+end
+local formats=allocate()
+fonts.formats=formats
+setmetatableindex(formats,function(t,k)
+ local l=lower(k)
+ if rawget(t,k) then
+ t[k]=l
+ return l
+ end
+ return rawget(t,file.suffix(l))
+end)
+local locations={}
+local function setindeed(mode,target,group,name,action,position)
+ local t=target[mode]
+ if not t then
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
+ os.exit()
+ elseif position then
+ insert(t,position,{ name=name,action=action })
+ else
+ for i=1,#t do
+ local ti=t[i]
+ if ti.name==name then
+ ti.action=action
+ return
+ end
+ end
+ insert(t,{ name=name,action=action })
+ end
+end
+local function set(group,name,target,source)
+ target=target[group]
+ if not target then
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local source=source[group]
+ if not source then
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local node=source.node
+ local base=source.base
+ local position=source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+local function register(where,specification)
+ local name=specification.name
+ if name and name~="" then
+ local default=specification.default
+ local description=specification.description
+ local initializers=specification.initializers
+ local processors=specification.processors
+ local manipulators=specification.manipulators
+ local modechecker=specification.modechecker
+ if default then
+ where.defaults[name]=default
+ end
+ if description and description~="" then
+ where.descriptions[name]=description
+ end
+ if initializers then
+ set('initializers',name,where,specification)
+ end
+ if processors then
+ set('processors',name,where,specification)
+ end
+ if manipulators then
+ set('manipulators',name,where,specification)
+ end
+ if modechecker then
+ where.modechecker=modechecker
+ end
+ end
+end
+constructors.registerfeature=register
+function constructors.getfeatureaction(what,where,mode,name)
+ what=handlers[what].features
+ if what then
+ where=what[where]
+ if where then
+ mode=where[mode]
+ if mode then
+ for i=1,#mode do
+ local m=mode[i]
+ if m.name==name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.newhandler(what)
+ local handler=handlers[what]
+ if not handler then
+ handler={}
+ handlers[what]=handler
+ end
+ return handler
+end
+function constructors.newfeatures(what)
+ local handler=handlers[what]
+ local features=handler.features
+ if not features then
+ local tables=handler.tables
+ local statistics=handler.statistics
+ features=allocate {
+ defaults={},
+ descriptions=tables and tables.features or {},
+ used=statistics and statistics.usedfeatures or {},
+ initializers={ base={},node={} },
+ processors={ base={},node={} },
+ manipulators={ base={},node={} },
+ }
+ features.register=function(specification) return register(features,specification) end
+ handler.features=features
+ end
+ return features
+end
+function constructors.checkedfeatures(what,features)
+ local defaults=handlers[what].features.defaults
+ if features and next(features) then
+ features=fastcopy(features)
+ for key,value in next,defaults do
+ if features[key]==nil then
+ features[key]=value
+ end
+ end
+ return features
+ else
+ return fastcopy(defaults)
+ end
+end
+function constructors.initializefeatures(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties or {}
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatinitializers=whatfeatures.initializers
+ local whatmodechecker=whatfeatures.modechecker
+ local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode=mode
+ features.mode=mode
+ local done={}
+ while true do
+ local redo=false
+ local initializers=whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step=initializers[i]
+ local feature=step.name
+ local value=features[feature]
+ if not value then
+ elseif done[feature] then
+ else
+ local action=step.action
+ if trace then
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
+ end
+ action(tfmdata,value,features)
+ if mode~=properties.mode or mode~=features.mode then
+ if whatmodechecker then
+ properties.mode=whatmodechecker(tfmdata,features,properties.mode)
+ features.mode=properties.mode
+ end
+ if mode~=properties.mode then
+ mode=properties.mode
+ redo=true
+ end
+ end
+ done[feature]=true
+ end
+ if redo then
+ break
+ end
+ end
+ if not redo then
+ break
+ end
+ else
+ break
+ end
+ end
+ properties.mode=mode
+ return true
+ else
+ return false
+ end
+end
+function constructors.collectprocessors(what,tfmdata,features,trace,report)
+ local processes,nofprocesses={},0
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatprocessors=whatfeatures.processors
+ local mode=properties.mode
+ local processors=whatprocessors[mode]
+ if processors then
+ for i=1,#processors do
+ local step=processors[i]
+ local feature=step.name
+ if features[feature] then
+ local action=step.action
+ if trace then
+ report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ nofprocesses=nofprocesses+1
+ processes[nofprocesses]=action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %a for font %a",mode,properties.fullname)
+ end
+ end
+ return processes
+end
+function constructors.applymanipulators(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatmanipulators=whatfeatures.manipulators
+ local mode=properties.mode
+ local manipulators=whatmanipulators[mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step=manipulators[i]
+ local feature=step.name
+ local value=features[feature]
+ if value then
+ local action=step.action
+ if trace then
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname)
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.addcoreunicodes(unicodes)
+ if not unicodes then
+ unicodes={}
+ end
+ unicodes.space=0x0020
+ unicodes.hyphen=0x002D
+ unicodes.zwj=0x200D
+ unicodes.zwnj=0x200C
+ return unicodes
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-enc']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.encodings={}
+fonts.encodings.agl={}
+fonts.encodings.known={}
+setmetatable(fonts.encodings.agl,{ __index=function(t,k)
+ if k=="unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes=dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl={ unicodes=unicodes }
+ return unicodes
+ else
+ return nil
+ end
+end })
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-cid']={
+ version=1.001,
+ comment="companion to font-otf.lua (cidmaps)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,match,lower=string.format,string.match,string.lower
+local tonumber=tonumber
+local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match
+local fonts,logs,trackers=fonts,logs,trackers
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local cid={}
+fonts.cid=cid
+local cidmap={}
+local cidmax=10
+local number=C(R("09","af","AF")^1)
+local space=S(" \n\r\t")
+local spaces=space^0
+local period=P(".")
+local periods=period*period
+local name=P("/")*C((1-space)^1)
+local unicodes,names={},{}
+local function do_one(a,b)
+ unicodes[tonumber(a)]=tonumber(b,16)
+end
+local function do_range(a,b,c)
+ c=tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i]=c
+ c=c+1
+ end
+end
+local function do_name(a,b)
+ names[tonumber(a)]=b
+end
+local grammar=P { "start",
+ start=number*spaces*number*V("series"),
+ series=(spaces*(V("one")+V("range")+V("named")))^1,
+ one=(number*spaces*number)/do_one,
+ range=(number*periods*number*spaces*number)/do_range,
+ named=(number*spaces*name)/do_name
+}
+local function loadcidfile(filename)
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+ local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names,
+ }
+ end
+end
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
+local function locate(registry,ordering,supplement)
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
+ if trace_loading then
+ report_otf("using cidmap file %a",filename)
+ end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
+ end
+ end
+ return found
+end
+function cid.getmap(specification)
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+ local filename=format(template,registry,ordering,supplement)
+ local lowername=lower(filename)
+ local found=cidmap[lowername]
+ if found then
+ return found
+ end
+ if ordering=="Identity" then
+ local found={
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes={},
+ names={},
+ }
+ cidmap[lowername]=found
+ return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tonumber,next,type=tonumber,next,type
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local formatters=string.formatters
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_mapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts or {}
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local allocate=utilities.storage.allocate
+local function loadlumtable(filename)
+ local lumname=file.replacesuffix(file.basename(filename),"lum")
+ local lumfile=resolvers.findfile(lumname,"map") or ""
+ if lumfile~="" and lfs.isfile(lumfile) then
+ if trace_loading or trace_mapping then
+ report_fonts("loading map table %a",lumfile)
+ end
+ lumunic=dofile(lumfile)
+ return lumunic,lumfile
+ end
+end
+local hex=R("AF","09")
+local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local dec=(R("09")^1)/tonumber
+local period=P(".")
+local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true))
+local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true))
+local index=P("index")*dec*Cc(false)
+local parser=unicode+ucode+index
+local parsers={}
+local function makenameparser(str)
+ if not str or str=="" then
+ return parser
+ else
+ local p=parsers[str]
+ if not p then
+ p=P(str)*period*dec*Cc(false)
+ parsers[str]=p
+ end
+ return p
+ end
+end
+local f_single=formatters["%04X"]
+local f_double=formatters["%04X%04X"]
+local function tounicode16(unicode,name)
+ if unicode<0x10000 then
+ return f_single(unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+end
+local function tounicode16sequence(unicodes,name)
+ local t={}
+ for l=1,#unicodes do
+ local u=unicodes[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
+  elseif u<0x1FFFFFFFFF then
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+end
+local function tounicode(unicode,name)
+ if type(unicode)=="table" then
+ local t={}
+ for l=1,#unicode do
+ local u=unicode[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
+ elseif u<0x1FFFFFFFFF then
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+ else
+ if unicode<0x10000 then
+ return f_single(unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+end
+local function fromunicode16(str)
+ if #str==4 then
+ return tonumber(str,16)
+ else
+ local l,r=match(str,"(....)(....)")
+ return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00
+ end
+end
+mappings.loadlumtable=loadlumtable
+mappings.makenameparser=makenameparser
+mappings.tounicode=tounicode
+mappings.tounicode16=tounicode16
+mappings.tounicode16sequence=tounicode16sequence
+mappings.fromunicode16=fromunicode16
+local ligseparator=P("_")
+local varseparator=P(".")
+local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0)
+local overloads=allocate {
+ IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 },
+ ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 },
+ ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 },
+ fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 },
+ fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 },
+ ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 },
+ ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 },
+ fj={ name="f_j",unicode={ 0x66,0x6A } },
+ fk={ name="f_k",unicode={ 0x66,0x6B } },
+}
+for k,v in next,overloads do
+ local name=v.name
+ local mess=v.mess
+ if name then
+ overloads[name]=v
+ end
+ if mess then
+ overloads[mess]=v
+ end
+end
+mappings.overloads=overloads
+function mappings.addtounicode(data,filename)
+ local resources=data.resources
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local lookuptypes=resources.lookuptypes
+ if not unicodes then
+ return
+ end
+ unicodes['space']=unicodes['space'] or 32
+ unicodes['hyphen']=unicodes['hyphen'] or 45
+ unicodes['zwj']=unicodes['zwj'] or 0x200D
+ unicodes['zwnj']=unicodes['zwnj'] or 0x200C
+ local private=fonts.constructors.privateoffset
+ local unicodevector=fonts.encodings.agl.unicodes
+ local missing={}
+ local lumunic,uparser,oparser
+ local cidinfo,cidnames,cidcodes,usedmap
+ cidinfo=properties.cidinfo
+ usedmap=cidinfo and fonts.cid.getmap(cidinfo)
+ if usedmap then
+ oparser=usedmap and makenameparser(cidinfo.ordering)
+ cidnames=usedmap.names
+ cidcodes=usedmap.unicodes
+ end
+ uparser=makenameparser()
+ local ns,nl=0,0
+ for unic,glyph in next,descriptions do
+ local index=glyph.index
+ local name=glyph.name
+ local r=overloads[name]
+ if r then
+ glyph.unicode=r.unicode
+ elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local unicode=lumunic and lumunic[name] or unicodevector[name]
+ if unicode then
+ glyph.unicode=unicode
+ ns=ns+1
+ end
+ if (not unicode) and usedmap then
+ local foundindex=lpegmatch(oparser,name)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ glyph.unicode=unicode
+ ns=ns+1
+ else
+ local reference=cidnames[foundindex]
+ if reference then
+ local foundindex=lpegmatch(oparser,reference)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ glyph.unicode=unicode
+ ns=ns+1
+ end
+ end
+ if not unicode or unicode=="" then
+ local foundcodes,multiple=lpegmatch(uparser,reference)
+ if foundcodes then
+ glyph.unicode=foundcodes
+ if multiple then
+ nl=nl+1
+ unicode=true
+ else
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if not unicode or unicode=="" then
+ local split=lpegmatch(namesplitter,name)
+ local nsplit=split and #split or 0
+ local t,n={},0
+ unicode=true
+ for l=1,nsplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ if u[1]>=private then
+ unicode=false
+ break
+ end
+ n=n+1
+ t[n]=u[1]
+ else
+ if u>=private then
+ unicode=false
+ break
+ end
+ n=n+1
+ t[n]=u
+ end
+ end
+ if n==0 then
+ elseif n==1 then
+ glyph.unicode=t[1]
+ else
+ glyph.unicode=t
+ end
+ nl=nl+1
+ end
+ if not unicode or unicode=="" then
+ local foundcodes,multiple=lpegmatch(uparser,name)
+ if foundcodes then
+ glyph.unicode=foundcodes
+ if multiple then
+ nl=nl+1
+ unicode=true
+ else
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ local r=overloads[unicode]
+ if r then
+ unicode=r.unicode
+ glyph.unicode=unicode
+ end
+ if not unicode then
+ missing[name]=true
+ end
+ end
+ end
+ if next(missing) then
+ local guess={}
+ local function check(gname,code,unicode)
+ local description=descriptions[code]
+ local variant=description.name
+ if variant==gname then
+ return
+ end
+ local unic=unicodes[variant]
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ else
+ return
+ end
+ if descriptions[code].unicode then
+ return
+ end
+ local g=guess[variant]
+ if g then
+ g[gname]=unicode
+ else
+ guess[variant]={ [gname]=unicode }
+ end
+ end
+ for unicode,description in next,descriptions do
+ local slookups=description.slookups
+ if slookups then
+ local gname=description.name
+ for tag,data in next,slookups do
+ local lookuptype=lookuptypes[tag]
+ if lookuptype=="alternate" then
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ elseif lookuptype=="substitution" then
+ check(gname,data,unicode)
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ local gname=description.name
+ for tag,list in next,mlookups do
+ local lookuptype=lookuptypes[tag]
+ if lookuptype=="alternate" then
+ for i=1,#list do
+ local data=list[i]
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ end
+ elseif lookuptype=="substitution" then
+ for i=1,#list do
+ check(gname,list[i],unicode)
+ end
+ end
+ end
+ end
+ end
+ local done=true
+ while done do
+ done=false
+ for k,v in next,guess do
+ if type(v)~="number" then
+ for kk,vv in next,v do
+ if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then
+ local uu=guess[kk]
+ if type(uu)=="number" then
+ guess[k]=uu
+ done=true
+ end
+ else
+ guess[k]=vv
+ done=true
+ end
+ end
+ end
+ end
+ end
+ local orphans=0
+ local guessed=0
+ for k,v in next,guess do
+ if type(v)=="number" then
+ descriptions[unicodes[k]].unicode=descriptions[v].unicode or v
+ guessed=guessed+1
+ else
+ local t=nil
+ local l=lower(k)
+ local u=unicodes[l]
+ if not u then
+ orphans=orphans+1
+ elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then
+ local unicode=descriptions[u].unicode
+ if unicode then
+ descriptions[unicodes[k]].unicode=unicode
+ guessed=guessed+1
+ else
+ orphans=orphans+1
+ end
+ else
+ orphans=orphans+1
+ end
+ end
+ end
+ if trace_loading and orphans>0 or guessed>0 then
+ report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans)
+ end
+ end
+ if trace_mapping then
+ for unic,glyph in table.sortedhash(descriptions) do
+ local name=glyph.name
+ local index=glyph.index
+ local unicode=glyph.unicode
+ if unicode then
+ if type(unicode)=="table" then
+ local unicodes={}
+ for i=1,#unicode do
+ unicodes[i]=formatters("%U",unicode[i])
+ end
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode)
+ end
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns>0 or nl>0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-syn']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.names=fonts.names or {}
+fonts.names.version=1.001
+fonts.names.basename="luatex-fonts-names"
+fonts.names.new_to_old={}
+fonts.names.old_to_new={}
+fonts.names.cache=containers.define("fonts","data",fonts.names.version,true)
+local data,loaded=nil,false
+local fileformats={ "lua","tex","other text files" }
+function fonts.names.reportmissingbase()
+ texio.write("<missing font database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingbase=nil
+end
+function fonts.names.reportmissingname()
+ texio.write("<unknown font in database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingname=nil
+end
+function fonts.names.resolve(name,sub)
+ if not loaded then
+ local basename=fonts.names.basename
+ if basename and basename~="" then
+ data=containers.read(fonts.names.cache,basename)
+ if not data then
+ basename=file.addsuffix(basename,"lua")
+ for i=1,#fileformats do
+ local format=fileformats[i]
+ local foundname=resolvers.findfile(basename,format) or ""
+ if foundname~="" then
+ data=dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ end
+ loaded=true
+ end
+ if type(data)=="table" and data.version==fonts.names.version then
+ local condensed=string.gsub(string.lower(name),"[^%a%d]","")
+ local found=data.mappings and data.mappings[condensed]
+ if found then
+ local fontname,filename,subfont=found[1],found[2],found[3]
+ if subfont then
+ return filename,fontname
+ else
+ return filename,false
+ end
+ elseif fonts.names.reportmissingname then
+ fonts.names.reportmissingname()
+ return name,false
+ end
+ elseif fonts.names.reportmissingbase then
+ fonts.names.reportmissingbase()
+ end
+end
+fonts.names.resolvespec=fonts.names.resolve
+function fonts.names.getfilename(askedname,suffix)
+ return ""
+end
+function fonts.names.ignoredfile(filename)
+ return false
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-tfm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next=next
+local match=string.match
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end)
+local report_defining=logs.reporter("fonts","defining")
+local report_tfm=logs.reporter("fonts","tfm loading")
+local findbinfile=resolvers.findbinfile
+local fonts=fonts
+local handlers=fonts.handlers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local encodings=fonts.encodings
+local tfm=constructors.newhandler("tfm")
+local tfmfeatures=constructors.newfeatures("tfm")
+local registertfmfeature=tfmfeatures.register
+constructors.resolvevirtualtoo=false
+fonts.formats.tfm="type1"
+function tfm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
+ if okay then
+ return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
+ else
+ return {}
+ end
+end
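+-- Read a tfm file via the engine's font.read_tfm() and massage the result into the
+-- generic tfmdata layout (properties, resources, parameters, shared); the numeric
+-- fontdimen slots are mapped onto named parameters.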
+local function read_from_tfm(specification)
+ local filename=specification.filename
+ local size=specification.size
+ if trace_defining then
+ report_defining("loading tfm file %a at size %s",filename,size)
+ end
+ local tfmdata=font.read_tfm(filename,size)
+ if tfmdata then
+ local features=specification.features and specification.features.normal or {}
+ local resources=tfmdata.resources or {}
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ local shared=tfmdata.shared or {}
+ properties.name=tfmdata.name
+ properties.fontname=tfmdata.fontname
+ properties.psname=tfmdata.psname
+ properties.filename=specification.filename
+ properties.format=fonts.formats.tfm
+ parameters.size=size
+ tfmdata.properties=properties
+ tfmdata.resources=resources
+ tfmdata.parameters=parameters
+ tfmdata.shared=shared
+ shared.rawdata={}
+ shared.features=features
+ shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
+ parameters.slant=parameters.slant or parameters[1] or 0
+ parameters.space=parameters.space or parameters[2] or 0
+ parameters.space_stretch=parameters.space_stretch or parameters[3] or 0
+ parameters.space_shrink=parameters.space_shrink or parameters[4] or 0
+ parameters.x_height=parameters.x_height or parameters[5] or 0
+ parameters.quad=parameters.quad or parameters[6] or 0
+ parameters.extra_space=parameters.extra_space or parameters[7] or 0
+ constructors.enhanceparameters(parameters)
+ if constructors.resolvevirtualtoo then
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification)
+ local vfname=findbinfile(specification.name,'ovf')
+ if vfname and vfname~="" then
+ local vfdata=font.read_vf(vfname,size)
+ if vfdata then
+ local chars=tfmdata.characters
+ for k,v in next,vfdata.characters do
+ chars[k].commands=v.commands
+ end
+ properties.virtualized=true
+ tfmdata.fonts=vfdata.fonts
+ end
+ end
+ end
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
+ if not features.encoding then
+ local encoding,filename=match(properties.filename,"^(.-)%-(.*)$")
+ if filename and encoding and encodings.known and encodings.known[encoding] then
+ features.encoding=encoding
+ end
+ end
+ properties.haskerns=true
+ properties.hasligatures=true
+ resources.unicodes={}
+ resources.lookuptags={}
+ return tfmdata
+ end
+end
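+-- Locate the tfm (or ofm) file for a given name, first via the file resolver and then
+-- via the name database, and hand it to read_from_tfm.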
+local function check_tfm(specification,fullname)
+ local foundname=findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=findbinfile(fullname,'ofm') or ""
+ end
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"tfm") or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return read_from_tfm(specification)
+ elseif trace_defining then
+ report_defining("loading tfm with name %a fails",specification.name)
+ end
+end
+readers.check_tfm=check_tfm
+function readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ return check_tfm(specification,fullname)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers
+local next,type,tonumber=next,type,tonumber
+local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip
+local abs=math.abs
+local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns
+local derivetable=table.derive
+local trace_features=false trackers.register("afm.features",function(v) trace_features=v end)
+local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end)
+local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end)
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local report_afm=logs.reporter("fonts","afm loading")
+local setmetatableindex=table.setmetatableindex
+local findbinfile=resolvers.findbinfile
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local fontloader=fontloader
+local font_to_table=fontloader.to_table
+local open_font=fontloader.open
+local close_font=fontloader.close
+local afm=constructors.newhandler("afm")
+local pfb=constructors.newhandler("pfb")
+local afmfeatures=constructors.newfeatures("afm")
+local registerafmfeature=afmfeatures.register
+afm.version=1.500
+afm.cache=containers.define("fonts","afm",afm.version,true)
+afm.autoprefixed=true
+afm.helpdata={}
+afm.syncspace=true
+afm.addligatures=true
+afm.addtexligatures=true
+afm.addkerns=true
+local overloads=fonts.mappings.overloads
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+registerafmfeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
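+-- Some afm files carry TeX font dimensions in Comment lines (DESIGNSIZE, CHECKSUM,
+-- SPACE, QUAD, NUM, DENOM and so on); the pattern below collects them into an indexed
+-- table (fd[1]..fd[22]) by way of scan_comment.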
+local comment=P("Comment")
+local spacing=patterns.spacer
+local lineend=patterns.newline
+local words=C((1-lineend)^1)
+local number=C((R("09")+S("."))^1)/tonumber*spacing^0
+local data=lpeg.Carg(1)
+local pattern=(
+ comment*spacing*(
+ data*(
+ ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end
+ )+(1-lineend)^0
+ )+(1-comment)^1
+)^0
+local function scan_comment(str)
+ local fd={}
+ lpegmatch(pattern,str,1,fd)
+ return fd
+end
+local keys={}
+function keys.FontName (data,line) data.metadata.fontname=strip (line)
+ data.metadata.fullname=strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender=tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end
+function keys.Comment (data,line)
+ line=lower(line)
+ local designsize=match(line,"designsize[^%d]*(%d+)")
+ if designsize then data.metadata.designsize=tonumber(designsize) end
+end
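+-- Parse the StartCharMetrics block: C gives the index, WX the width, N the name,
+-- B the bounding box and L the ligature pairs of a character.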
+local function get_charmetrics(data,charmetrics,vector)
+ local characters=data.characters
+ local chr,ind={},0
+ for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
+ if k=='C' then
+ v=tonumber(v)
+ if v<0 then
+ ind=ind+1
+ else
+ ind=v
+ end
+ chr={
+ index=ind
+ }
+ elseif k=='WX' then
+ chr.width=tonumber(v)
+ elseif k=='N' then
+ characters[v]=chr
+ elseif k=='B' then
+ local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
+ chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) }
+ elseif k=='L' then
+ local plus,becomes=match(v,"^(.-) +(.-)$")
+ local ligatures=chr.ligatures
+ if ligatures then
+ ligatures[plus]=becomes
+ else
+ chr.ligatures={ [plus]=becomes }
+ end
+ end
+ end
+end
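+-- Parse KPX lines from the kern pair block into per-character kern tables, keyed by
+-- glyph name (unified to unicodes later).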
+local function get_kernpairs(data,kernpairs)
+ local characters=data.characters
+ for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
+ local chr=characters[one]
+ if chr then
+ local kerns=chr.kerns
+ if kerns then
+ kerns[two]=tonumber(value)
+ else
+ chr.kerns={ [two]=tonumber(value) }
+ end
+ end
+ end
+end
+local function get_variables(data,fontmetrics)
+ for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
+ local keyhandler=keys[key]
+ if keyhandler then
+ keyhandler(data,rest)
+ end
+ end
+end
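+-- Open the companion pfb file with the fontloader and copy the glyph indices onto the
+-- character records, matching by glyph name.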
+local function get_indexes(data,pfbname)
+ data.resources.filename=resolvers.unresolve(pfbname)
+ local pfbblob=open_font(pfbname)
+ if pfbblob then
+ local characters=data.characters
+ local pfbdata=font_to_table(pfbblob)
+ if pfbdata then
+ local glyphs=pfbdata.glyphs
+ if glyphs then
+ if trace_loading then
+ report_afm("getting index data from %a",pfbname)
+ end
+ for index,glyph in next,glyphs do
+ local name=glyph.name
+ if name then
+ local char=characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %a has index %a",name,index)
+ end
+ char.index=index
+ end
+ end
+ end
+ elseif trace_loading then
+ report_afm("no glyph data in pfb file %a",pfbname)
+ end
+ elseif trace_loading then
+ report_afm("no data in pfb file %a",pfbname)
+ end
+ close_font(pfbblob)
+ elseif trace_loading then
+ report_afm("invalid pfb file %a",pfbname)
+ end
+end
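+-- Load the raw afm file and dissect it into character metrics, kern pairs and global
+-- variables plus fontdimens; the resulting table still needs unifying and enhancing.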
+local function readafm(filename)
+ local ok,afmblob,size=resolvers.loadbinfile(filename)
+ if ok and afmblob then
+ local data={
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=afm.version,
+ creator="context mkiv",
+ },
+ properties={
+ hasitalics=false,
+ },
+ goodies={},
+ metadata={
+ filename=file.removesuffix(file.basename(filename))
+ },
+ characters={
+ },
+ descriptions={
+ },
+ }
+ afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics)
+ if trace_loading then
+ report_afm("loading char metrics")
+ end
+ get_charmetrics(data,charmetrics,vector)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs)
+ if trace_loading then
+ report_afm("loading kern pairs")
+ end
+ get_kernpairs(data,kernpairs)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics)
+ if trace_loading then
+ report_afm("loading variables")
+ end
+ data.afmversion=version
+ get_variables(data,fontmetrics)
+ data.fontdimens=scan_comment(fontmetrics)
+ return ""
+ end)
+ return data
+ else
+ if trace_loading then
+ report_afm("no valid afm file %a",filename)
+ end
+ return nil
+ end
+end
+local addkerns,addligatures,addtexligatures,unify,normalize,fixnames
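+-- Cached afm loader: parse the afm (and optional pfb) once, add ligatures, kerns and
+-- tounicode data, and store the result in the cache keyed on file sizes and
+-- modification times.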
+function afm.load(filename)
+ filename=resolvers.findfile(filename,'afm') or ""
+ if filename~="" and not fonts.names.ignoredfile(filename) then
+ local name=file.removesuffix(file.basename(filename))
+ local data=containers.read(afm.cache,name)
+ local attr=lfs.attributes(filename)
+ local size,time=attr.size or 0,attr.modification or 0
+ local pfbfile=file.replacesuffix(name,"pfb")
+ local pfbname=resolvers.findfile(pfbfile,"pfb") or ""
+ if pfbname=="" then
+ pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize,pfbtime=0,0
+ if pfbname~="" then
+ local attr=lfs.attributes(pfbname)
+ pfbsize=attr.size or 0
+ pfbtime=attr.modification or 0
+ end
+ if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then
+ report_afm("reading %a",filename)
+ data=readafm(filename)
+ if data then
+ if pfbname~="" then
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %a",filename)
+ end
+ report_afm("unifying %a",filename)
+ unify(data,filename)
+ if afm.addligatures then
+ report_afm("add ligatures")
+ addligatures(data)
+ end
+ if afm.addtexligatures then
+ report_afm("add tex ligatures")
+ addtexligatures(data)
+ end
+ if afm.addkerns then
+ report_afm("add extra kerns")
+ addkerns(data)
+ end
+ normalize(data)
+ fixnames(data)
+ report_afm("add tounicode data")
+ fonts.mappings.addtounicode(data,filename)
+ data.size=size
+ data.time=time
+ data.pfbsize=pfbsize
+ data.pfbtime=pfbtime
+ report_afm("saving %a in cache",name)
+ data.resources.unicodes=nil
+ data=containers.write(afm.cache,name,data)
+ data=containers.read(afm.cache,name)
+ end
+ if applyruntimefixes and data then
+ applyruntimefixes(filename,data)
+ end
+ end
+ return data
+ else
+ return nil
+ end
+end
+local uparser=fonts.mappings.makenameparser()
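+-- unify: convert the name-keyed characters into unicode-keyed descriptions, resolving
+-- names through the Adobe glyph list or the name parser and assigning private slots to
+-- unknown names; kern targets are rewritten from names to unicodes as well.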
+unify=function(data,filename)
+ local unicodevector=fonts.encodings.agl.unicodes
+ local unicodes,names={},{}
+ local private=constructors.privateoffset
+ local descriptions=data.descriptions
+ for name,blob in next,data.characters do
+ local code=unicodevector[name]
+ if not code then
+ code=lpegmatch(uparser,name)
+ if not code then
+ code=private
+ private=private+1
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
+ end
+ end
+ local index=blob.index
+ unicodes[name]=code
+ names[name]=index
+ blob.name=name
+ descriptions[code]={
+ boundingbox=blob.boundingbox,
+ width=blob.width,
+ kerns=blob.kerns,
+ index=index,
+ name=name,
+ }
+ end
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local krn={}
+ for name,kern in next,kerns do
+ local unicode=unicodes[name]
+ if unicode then
+ krn[unicode]=kern
+ else
+ end
+ end
+ description.kerns=krn
+ end
+ end
+ data.characters=nil
+ local resources=data.resources
+ local filename=resources.filename or file.removesuffix(file.basename(filename))
+ resources.filename=resolvers.unresolve(filename)
+ resources.unicodes=unicodes
+ resources.marks={}
+ resources.private=private
+end
+normalize=function(data)
+end
+fixnames=function(data)
+ for k,v in next,data.descriptions do
+ local n=v.name
+ local r=overloads[n]
+ if r then
+ local name=r.name
+ if trace_indexing then
+ report_afm("renaming characters %a to %a",n,name)
+ end
+ v.name=name
+ v.unicode=r.unicode
+ end
+ end
+end
+local addthem=function(rawdata,ligatures)
+ if ligatures then
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ for ligname,ligdata in next,ligatures do
+ local one=descriptions[unicodes[ligname]]
+ if one then
+ for _,pair in next,ligdata do
+ local two,three=unicodes[pair[1]],unicodes[pair[2]]
+ if two and three then
+ local ol=one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two]=three
+ end
+ else
+ one.ligatures={ [two]=three }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
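+-- addkerns: let composed (accented) characters inherit kerns from their base
+-- characters, as listed in afm.helpdata (leftkerned, rightkerned, bothkerned).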
+addkerns=function(rawdata)
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local function do_it_left(what)
+ if what then
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local extrakerns
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local ks=kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex]=ks
+ else
+ extrakerns={ [complex]=ks }
+ end
+ end
+ end
+ end
+ if extrakerns then
+ description.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ local function do_it_copy(what)
+ if what then
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local complexdescription=descriptions[complex]
+ if complexdescription then
+ local simpledescription=descriptions[simple]
+ if simpledescription then
+ local extrakerns
+ local kerns=simpledescription.kerns
+ if kerns then
+ for unicode,kern in next,kerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ local simpleextrakerns=simpledescription.extrakerns
+ if simpleextrakerns then
+ for unicode,kern in next,simpleextrakerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ if extrakerns then
+ complexdescription.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
+end
+local function adddimensions(data)
+ if data then
+ for unicode,description in next,data.descriptions do
+ local bb=description.boundingbox
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ description.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ description.depth=dp
+ end
+ end
+ end
+ end
+end
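+-- copytotfm: turn the unified afm data into the tfmdata layout used by the
+-- constructors, deriving space, stretch and shrink, x-height, italic angle and design
+-- size parameters.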
+local function copytotfm(data)
+ if data and data.descriptions then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local unicodes=resources.unicodes
+ for unicode,description in next,data.descriptions do
+ characters[unicode]={}
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname or metadata.fullname
+ local fullname=metadata.fullname or metadata.fontname
+ local endash=0x0020
+ local emdash=0x2014
+ local spacer="space"
+ local spaceunits=500
+ local monospaced=metadata.isfixedpitch
+ local charwidth=metadata.charwidth
+ local italicangle=metadata.italicangle
+ local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ if properties.monospaced then
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits)
+ if spaceunits<200 then
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=500
+ parameters.space_shrink=333
+ parameters.x_height=400
+ parameters.quad=1000
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif afm.syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x0078
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ local fd=data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then
+ for k,v in next,fd do
+ parameters[k]=v
+ end
+ end
+ parameters.designsize=(metadata.designsize or 10)*65536
+ parameters.ascender=abs(metadata.ascender or 0)
+ parameters.descender=abs(metadata.descender or 0)
+ parameters.units=1000
+ properties.spacer=spacer
+ properties.encodingbytes=2
+ properties.format=fonts.formats[filename] or "type1"
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fullname
+ properties.name=filename or fullname or fontname
+ if next(characters) then
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+ end
+ return nil
+end
+function afm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
+ if okay then
+ return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
+ else
+ return {}
+ end
+end
+local function addtables(data)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
+end
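+-- afmtotfm: unless the afm format is forced, bail out when a tfm/ofm file with the
+-- same name exists (the tfm reader then takes over); otherwise load the afm data, add
+-- tables and dimensions, convert it with copytotfm and initialize the requested
+-- features.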
+local function afmtotfm(specification)
+ local afmname=specification.filename or specification.name
+ if specification.forced=="afm" or specification.format=="afm" then
+ if trace_loading then
+ report_afm("forcing afm format for %a",afmname)
+ end
+ else
+ local tfmname=findbinfile(afmname,"ofm") or ""
+ if tfmname~="" then
+ if trace_loading then
+ report_afm("fallback from afm to tfm for %a",afmname)
+ end
+ return
+ end
+ end
+ if afmname~="" then
+ local features=constructors.checkedfeatures("afm",specification.features.normal)
+ specification.features.normal=features
+ constructors.hashinstance(specification,true)
+ specification=definers.resolve(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local rawdata=afm.load(afmname)
+ if rawdata and next(rawdata) then
+ addtables(rawdata)
+ adddimensions(rawdata)
+ tfmdata=copytotfm(rawdata)
+ if tfmdata and next(tfmdata) then
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.features=features
+ shared.processes=afm.setfeatures(tfmdata,features)
+ end
+ elseif trace_loading then
+ report_afm("no (valid) afm file found with name %a",afmname)
+ end
+ tfmdata=containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+ end
+end
+local function read_from_afm(specification)
+ local tfmdata=afmtotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+ fonts.loggers.register(tfmdata,'afm',specification)
+ end
+ return tfmdata
+end
+local function prepareligatures(tfmdata,ligatures,value)
+ if value then
+ local descriptions=tfmdata.descriptions
+ local hasligatures=false
+ for unicode,character in next,tfmdata.characters do
+ local description=descriptions[unicode]
+ local dligatures=description.ligatures
+ if dligatures then
+ local cligatures=character.ligatures
+ if not cligatures then
+ cligatures={}
+ character.ligatures=cligatures
+ end
+ for unicode,ligature in next,dligatures do
+ cligatures[unicode]={
+ char=ligature,
+ type=0
+ }
+ end
+ hasligatures=true
+ end
+ end
+ tfmdata.properties.hasligatures=hasligatures
+ end
+end
+local function preparekerns(tfmdata,kerns,value)
+ if value then
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local descriptions=tfmdata.descriptions
+ local haskerns=false
+ for u,chr in next,tfmdata.characters do
+ local d=descriptions[u]
+ local newkerns=d[kerns]
+ if newkerns then
+ local kerns=chr.kerns
+ if not kerns then
+ kerns={}
+ chr.kerns=kerns
+ end
+ for k,v in next,newkerns do
+ local uk=unicodes[k]
+ if uk then
+ kerns[uk]=v
+ end
+ end
+ haskerns=true
+ end
+ end
+ tfmdata.properties.haskerns=haskerns
+ end
+end
+local list={
+ [0x0027]=0x2019,
+}
+local function texreplacements(tfmdata,value)
+ local descriptions=tfmdata.descriptions
+ local characters=tfmdata.characters
+ for k,v in next,list do
+ characters [k]=characters [v]
+ descriptions[k]=descriptions[v]
+ end
+end
+local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end
+local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
+local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end
+local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end
+registerafmfeature {
+ name="liga",
+ description="traditional ligatures",
+ initializers={
+ base=ligatures,
+ node=ligatures,
+ }
+}
+registerafmfeature {
+ name="kern",
+ description="intercharacter kerning",
+ initializers={
+ base=kerns,
+ node=kerns,
+ }
+}
+registerafmfeature {
+ name="extrakerns",
+ description="additional intercharacter kerning",
+ initializers={
+ base=extrakerns,
+ node=extrakerns,
+ }
+}
+registerafmfeature {
+ name='tlig',
+ description='tex ligatures',
+ initializers={
+ base=texligatures,
+ node=texligatures,
+ }
+}
+registerafmfeature {
+ name='trep',
+ description='tex replacements',
+ initializers={
+ base=texreplacements,
+ node=texreplacements,
+ }
+}
+local check_tfm=readers.check_tfm
+fonts.formats.afm="type1"
+fonts.formats.pfb="type1"
+local function check_afm(specification,fullname)
+ local foundname=findbinfile(fullname,'afm') or ""
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"afm") or ""
+ end
+ if foundname=="" and afm.autoprefixed then
+ local encoding,shortname=match(fullname,"^(.-)%-(.*)$")
+ if encoding and shortname and fonts.encodings.known[encoding] then
+ shortname=findbinfile(shortname,'afm') or ""
+ if shortname~="" then
+ foundname=shortname
+ if trace_defining then
+ report_afm("stripping encoding prefix from filename %a",afmname)
+ end
+ end
+ end
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="afm"
+ return read_from_afm(specification)
+ end
+end
+function readers.afm(specification,method)
+ local fullname,tfmdata=specification.filename or "",nil
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ tfmdata=check_afm(specification,specification.name.."."..forced)
+ end
+ if not tfmdata then
+ method=method or definers.method or "afm or tfm"
+ if method=="tfm" then
+ tfmdata=check_tfm(specification,specification.name)
+ elseif method=="afm" then
+ tfmdata=check_afm(specification,specification.name)
+ elseif method=="tfm or afm" then
+ tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name)
+ else
+ tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name)
+ end
+ end
+ else
+ tfmdata=check_afm(specification,fullname)
+ end
+ return tfmdata
+end
+function readers.pfb(specification,method)
+ local original=specification.specification
+ if trace_defining then
+ report_afm("using afm reader for %a",original)
+ end
+ specification.specification=gsub(original,"%.pfb",".afm")
+ specification.forced="afm"
+ return readers.afm(specification,method)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afk']={
+ version=1.001,
+ comment="companion to font-afm.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+ dataonly=true,
+}
+local allocate=utilities.storage.allocate
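+-- Static helper data for the afm handler: ligature and TeX ligature building rules
+-- plus the lists of composed characters that share kerning with their base glyphs.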
+fonts.handlers.afm.helpdata={
+ ligatures=allocate {
+ ['f']={
+ { 'f','ff' },
+ { 'i','fi' },
+ { 'l','fl' },
+ },
+ ['ff']={
+ { 'i','ffi' }
+ },
+ ['fi']={
+ { 'i','fii' }
+ },
+ ['fl']={
+ { 'i','fli' }
+ },
+ ['s']={
+ { 't','st' }
+ },
+ ['i']={
+ { 'j','ij' }
+ },
+ },
+ texligatures=allocate {
+ ['quoteleft']={
+ { 'quoteleft','quotedblleft' }
+ },
+ ['quoteright']={
+ { 'quoteright','quotedblright' }
+ },
+ ['hyphen']={
+ { 'hyphen','endash' }
+ },
+ ['endash']={
+ { 'hyphen','emdash' }
+ }
+ },
+ leftkerned=allocate {
+ AEligature="A",aeligature="a",
+ OEligature="O",oeligature="o",
+ IJligature="I",ijligature="i",
+ AE="A",ae="a",
+ OE="O",oe="o",
+ IJ="I",ij="i",
+ Ssharp="S",ssharp="s",
+ },
+ rightkerned=allocate {
+ AEligature="E",aeligature="e",
+ OEligature="E",oeligature="e",
+ IJligature="J",ijligature="j",
+ AE="E",ae="e",
+ OE="E",oe="e",
+ IJ="J",ij="j",
+ Ssharp="S",ssharp="s",
+ },
+ bothkerned=allocate {
+ Acircumflex="A",acircumflex="a",
+ Ccircumflex="C",ccircumflex="c",
+ Ecircumflex="E",ecircumflex="e",
+ Gcircumflex="G",gcircumflex="g",
+ Hcircumflex="H",hcircumflex="h",
+ Icircumflex="I",icircumflex="i",
+ Jcircumflex="J",jcircumflex="j",
+ Ocircumflex="O",ocircumflex="o",
+ Scircumflex="S",scircumflex="s",
+ Ucircumflex="U",ucircumflex="u",
+ Wcircumflex="W",wcircumflex="w",
+ Ycircumflex="Y",ycircumflex="y",
+ Agrave="A",agrave="a",
+ Egrave="E",egrave="e",
+ Igrave="I",igrave="i",
+ Ograve="O",ograve="o",
+ Ugrave="U",ugrave="u",
+ Ygrave="Y",ygrave="y",
+ Atilde="A",atilde="a",
+ Itilde="I",itilde="i",
+ Otilde="O",otilde="o",
+ Utilde="U",utilde="u",
+ Ntilde="N",ntilde="n",
+ Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a",
+ Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e",
+ Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i",
+ Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o",
+ Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u",
+ Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y",
+ Aacute="A",aacute="a",
+ Cacute="C",cacute="c",
+ Eacute="E",eacute="e",
+ Iacute="I",iacute="i",
+ Lacute="L",lacute="l",
+ Nacute="N",nacute="n",
+ Oacute="O",oacute="o",
+ Racute="R",racute="r",
+ Sacute="S",sacute="s",
+ Uacute="U",uacute="u",
+ Yacute="Y",yacute="y",
+ Zacute="Z",zacute="z",
+ Dstroke="D",dstroke="d",
+ Hstroke="H",hstroke="h",
+ Tstroke="T",tstroke="t",
+ Cdotaccent="C",cdotaccent="c",
+ Edotaccent="E",edotaccent="e",
+ Gdotaccent="G",gdotaccent="g",
+ Idotaccent="I",idotaccent="i",
+ Zdotaccent="Z",zdotaccent="z",
+ Amacron="A",amacron="a",
+ Emacron="E",emacron="e",
+ Imacron="I",imacron="i",
+ Omacron="O",omacron="o",
+ Umacron="U",umacron="u",
+ Ccedilla="C",ccedilla="c",
+ Kcedilla="K",kcedilla="k",
+ Lcedilla="L",lcedilla="l",
+ Ncedilla="N",ncedilla="n",
+ Rcedilla="R",rcedilla="r",
+ Scedilla="S",scedilla="s",
+ Tcedilla="T",tcedilla="t",
+ Ohungarumlaut="O",ohungarumlaut="o",
+ Uhungarumlaut="U",uhungarumlaut="u",
+ Aogonek="A",aogonek="a",
+ Eogonek="E",eogonek="e",
+ Iogonek="I",iogonek="i",
+ Uogonek="U",uogonek="u",
+ Aring="A",aring="a",
+ Uring="U",uring="u",
+ Abreve="A",abreve="a",
+ Ebreve="E",ebreve="e",
+ Gbreve="G",gbreve="g",
+ Ibreve="I",ibreve="i",
+ Obreve="O",obreve="o",
+ Ubreve="U",ubreve="u",
+ Ccaron="C",ccaron="c",
+ Dcaron="D",dcaron="d",
+ Ecaron="E",ecaron="e",
+ Lcaron="L",lcaron="l",
+ Ncaron="N",ncaron="n",
+ Rcaron="R",rcaron="r",
+ Scaron="S",scaron="s",
+ Tcaron="T",tcaron="t",
+ Zcaron="Z",zcaron="z",
+ dotlessI="I",dotlessi="i",
+ dotlessJ="J",dotlessj="j",
+ AEligature="AE",aeligature="ae",AE="AE",ae="ae",
+ OEligature="OE",oeligature="oe",OE="OE",oe="oe",
+ IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij",
+ Lstroke="L",lstroke="l",Lslash="L",lslash="l",
+ Ostroke="O",ostroke="o",Oslash="O",oslash="o",
+ Ssharp="SS",ssharp="ss",
+ Aumlaut="A",aumlaut="a",
+ Eumlaut="E",eumlaut="e",
+ Iumlaut="I",iumlaut="i",
+ Oumlaut="O",oumlaut="o",
+ Uumlaut="U",uumlaut="u",
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
+function fonts.readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
+registerotffeature {
+ name="features",
+ description="initialization of feature handler",
+ default=true,
+}
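+-- The basic otf features "mode", "language" and "script" just store a lower-cased
+-- value in tfmdata.properties; unknown scripts and languages fall back to "dflt".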
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
+ end
+ end
+end
+local function setscript(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local scripts=otftables and otftables.scripts
+ local properties=tfmdata.properties
+ if not scripts then
+ properties.script=cleanvalue
+ elseif scripts[value] then
+ properties.script=cleanvalue
+ else
+ properties.script="dflt"
+ end
+ end
+end
+registerotffeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+registerotffeature {
+ name="language",
+ description="language",
+ initializers={
+ base=setlanguage,
+ node=setlanguage,
+ }
+}
+registerotffeature {
+ name="script",
+ description="script",
+ initializers={
+ base=setscript,
+ node=setscript,
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local utfbyte=utf.byte
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local abs=math.abs
+local insert=table.insert
+local lpegmatch=lpeg.match
+local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
+local ioflush=io.flush
+local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
+local formatters=string.formatters
+local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match
+local setmetatableindex=table.setmetatableindex
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
+local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
+local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end)
+local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.glists={ "gsub","gpos" }
+otf.version=2.803
+otf.cache=containers.define("fonts","otf",otf.version,true)
+local hashes=fonts.hashes
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local fontdata=hashes and hashes.identifiers
+local chardata=characters and characters.data
+local otffeatures=constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local forceload=false
+local cleanup=0
+local packdata=true
+local syncspace=true
+local forcenotdef=false
+local includesubfonts=false
+local overloadkerns=false
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local wildcard="*"
+local default="dflt"
+local fontloader=fontloader
+local open_font=fontloader.open
+local close_font=fontloader.close
+local font_fields=fontloader.fields
+local apply_featurefile=fontloader.apply_featurefile
+local mainfields=nil
+local glyphfields=nil
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end)
+function otf.fileformat(filename)
+ local leader=lower(io.loadchunk(filename,4))
+ local suffix=lower(file.suffix(filename))
+ if leader=="otto" then
+ return formats.otf,suffix=="otf"
+ elseif leader=="ttcf" then
+ return formats.ttc,suffix=="ttc"
+ elseif suffix=="ttc" then
+ return formats.ttc,true
+ elseif suffix=="dfont" then
+ return formats.dfont,true
+ else
+ return formats.ttf,suffix=="ttf"
+ end
+end
+local function otf_format(filename)
+ local format,okay=otf.fileformat(filename)
+ if not okay then
+ report_otf("font %a is actually an %a file",filename,format)
+ end
+ return format
+end
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile~="" then
+ if trace_loading then
+ report_otf("using featurefile %a",featurefile)
+ end
+ apply_featurefile(raw,featurefile)
+ end
+end
+local function showfeatureorder(rawdata,filename)
+ local sequences=rawdata.resources.sequences
+ if sequences and #sequences>0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence=sequences[nos]
+ local typ=sequence.type or "no-type"
+ local name=sequence.name or "no-name"
+ local subtables=sequence.subtables or { "no-subtables" }
+ local features=sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature,scripts in next,features do
+ local tt={}
+ if type(scripts)=="table" then
+ for script,languages in next,scripts do
+ local ttt={}
+ for language,_ in next,languages do
+ ttt[#ttt+1]=language
+ end
+ tt[#tt+1]=formatters["[%s: % t]"](script,ttt)
+ end
+ if trace_loading then
+ report_otf(" %s: % t",feature,tt)
+ end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+local valid_fields=table.tohash {
+ "ascent",
+ "cidinfo",
+ "copyright",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ "hasvmetrics",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ "macstyle",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ "serifcheck",
+ "sfd_version",
+ "strokedfont",
+ "strokewidth",
+ "table_version",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "validation_state",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
+}
+local ordered_enhancers={
+ "prepare tables",
+ "prepare glyphs",
+ "prepare lookups",
+ "analyze glyphs",
+ "analyze math",
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+ "merge kern classes",
+ "reorganize features",
+ "reorganize subtables",
+ "check glyphs",
+ "check metadata",
+ "check extra features",
+ "prepare tounicode",
+ "check encoding",
+ "add duplicates",
+ "cleanup tables",
+ "compact lookups",
+ "purge names",
+}
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
+local function enhance(name,data,filename,raw)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ end
+end
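+-- Run the ordered enhancer chain over freshly loaded font data; per-file before and
+-- after patches registered via patches.register are applied when their pattern matches
+-- the basename.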
+function enhancers.apply(data,filename,raw)
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush()
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush()
+end
+function patches.register(what,where,pattern,action)
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
+ end
+ end
+end
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+function enhancers.register(what,action)
+ actions[what]=action
+end
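+-- otf.load: the main cached loader. The cache hash combines font name, optional
+-- subfont and feature files; a reload is triggered when sizes or timestamps change.
+-- Fresh data is run through the enhancers, optionally packed and written to the cache;
+-- on the way out lookup tags, unicode fallbacks and glyph dimensions are added.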
+function otf.load(filename,sub,featurefile)
+ local base=file.basename(file.removesuffix(filename))
+ local name=file.removesuffix(base)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
+ end
+ end
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload=true
+ end
+ if not reload then
+ local featuredata=data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata~=#featurefiles then
+ reload=true
+ else
+ for i=1,#featurefiles do
+ local fi,fd=featurefiles[i],featuredata[i]
+ if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
+ reload=true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload=true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata,messages
+ if sub then
+ fontdata,messages=open_font(filename,sub)
+ else
+ fontdata,messages=open_font(filename)
+ end
+ if fontdata then
+ mainfields=mainfields or (font_fields and font_fields(fontdata))
+ end
+ if trace_loading and messages and #messages>0 then
+ if type(messages)=="string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
+ end
+ end
+ else
+ report_otf("loading done")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes={
+ }
+ local splitter=lpeg.splitter(" ",unicodes)
+ data={
+ size=size,
+ time=time,
+ format=otf_format(filename),
+ featuredata=featurefiles,
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=otf.version,
+ creator="context mkiv",
+ unicodes=unicodes,
+ indices={
+ },
+ duplicates={
+ },
+ variants={
+ },
+ lookuptypes={},
+ },
+ warnings={},
+ metadata={
+ },
+ properties={
+ },
+ descriptions={},
+ goodies={},
+ helpers={
+ tounicodelist=splitter,
+ tounicodetable=Ct(splitter),
+ },
+ }
+ starttiming(data)
+ report_otf("file size: %s",size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime={}
+ if packdata then
+ if cleanup>0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then
+ report_otf("preprocessing and caching time %s, packtime %s",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
+ end
+ close_font(fontdata)
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ data=nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ else
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
+ enhance("add dimensions",data,filename,nil,false)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+actions["prepare tables"]=function(data,filename,raw)
+ data.properties.hasitalics=false
+end
+actions["add dimensions"]=function(data,filename)
+ if data then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local basename=trace_markwidth and file.basename(filename)
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ if bb then
+ local ht=bb[4]
+ local dp=-bb[2]
+ if ht==0 or ht<0 then
+ else
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
+ end
+ end
+ end
+ end
+end
+local function somecopy(old)
+ if old then
+ local new={}
+ if type(old)=="table" then
+ for k,v in next,old do
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ else
+ for i=1,#mainfields do
+ local k=mainfields[i]
+ local v=old[k]
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
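+-- "prepare glyphs": build unicode-keyed descriptions from the raw glyph array, moving
+-- unencoded or colliding glyphs to private slots; cid subfonts are remapped through
+-- their cid map and variant selectors are collected from altuni entries.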
+actions["prepare glyphs"]=function(data,filename,raw)
+ local rawglyphs=raw.glyphs
+ local rawsubfonts=raw.subfonts
+ local rawcidinfo=raw.cidinfo
+ local criterium=constructors.privateoffset
+ local private=criterium
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local variants=resources.variants
+ if rawsubfonts then
+ metadata.subfonts=includesubfonts and {}
+ properties.cidinfo=rawcidinfo
+ if rawcidinfo.registry then
+ local cidmap=fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname=cidmap.usedname
+ local nofnames,nofunicodes=0,0
+ local cidunicodes,cidnames=cidmap.unicodes,cidmap.names
+ for cidindex=1,#rawsubfonts do
+ local subfont=rawsubfonts[cidindex]
+ local cidglyphs=subfont.glyphs
+ if includesubfonts then
+ metadata.subfonts[cidindex]=somecopy(subfont)
+ end
+ for index=0,subfont.glyphcnt-1 do
+ local glyph=cidglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ if unicode>=0x00E000 and unicode<=0x00F8FF then
+ unicode=-1
+ elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
+ unicode=-1
+ elseif unicode>=0x100000 and unicode<=0x10FFFD then
+ unicode=-1
+ end
+ local name=glyph.name or cidnames[index]
+ if not unicode or unicode==-1 then
+ unicode=cidunicodes[index]
+ end
+ if unicode and descriptions[unicode] then
+ if trace_private then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ end
+ unicode=-1
+ end
+ if not unicode or unicode==-1 then
+ if not name then
+ name=format("u%06X.ctx",private)
+ end
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ nofnames=nofnames+1
+ else
+ if not name then
+ name=format("u%06X.ctx",unicode)
+ end
+ unicodes[name]=unicode
+ nofunicodes=nofunicodes+1
+ end
+ indices[index]=unicode
+ local description={
+ boundingbox=glyph.boundingbox,
+ name=name or "unknown",
+ cidindex=cidindex,
+ index=index,
+ glyph=glyph,
+ }
+ descriptions[unicode]=description
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
+ end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+ else
+ for index=0,raw.glyphcnt-1 do
+ local glyph=rawglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name
+ if not unicode or unicode==-1 then
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ else
+ if unicode>criterium then
+ local taken=descriptions[unicode]
+ if taken then
+ if unicode>=private then
+ private=unicode+1
+ else
+ private=private+1
+ end
+ descriptions[private]=taken
+ unicodes[taken.name]=private
+ indices[taken.index]=private
+ if trace_private then
+ report_otf("slot %U is moved to %U due to private in font",unicode)
+ end
+ else
+ if unicode>=private then
+ private=unicode+1
+ end
+ end
+ end
+ unicodes[name]=unicode
+ end
+ indices[index]=unicode
+ descriptions[unicode]={
+ boundingbox=glyph.boundingbox,
+ name=name,
+ index=index,
+ glyph=glyph,
+ }
+ local altuni=glyph.altuni
+ if altuni then
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ end
+ end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ end
+ resources.private=private
+end
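+-- "check encoding": compare the embedded unicode cmap with the glyph-derived mapping
+-- and record extra code points as copies; "add duplicates" later materializes those
+-- copies as real descriptions with adapted kerns.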
+actions["check encoding"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local mapdata=raw.map or {}
+ local unicodetoindex=mapdata and mapdata.map or {}
+ local indextounicode=mapdata and mapdata.backmap or {}
+ local encname=lower(data.enc_name or mapdata.enc_name or "")
+ local criterium=0xFFFF
+ local privateoffset=constructors.privateoffset
+ if find(encname,"unicode") then
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
+ end
+ local reported={}
+ for maybeunicode,index in next,unicodetoindex do
+ if descriptions[maybeunicode] then
+ else
+ local unicode=indices[index]
+ if not unicode then
+ elseif maybeunicode==unicode then
+ elseif unicode>privateoffset then
+ else
+ local d=descriptions[unicode]
+ if d then
+ local c=d.copies
+ if c then
+ c[maybeunicode]=true
+ else
+ d.copies={ [maybeunicode]=true }
+ end
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index]=true
+ end
+ end
+ end
+ end
+ for unicode,data in next,descriptions do
+ local d=data.copies
+ if d then
+ duplicates[unicode]=sortedkeys(d)
+ data.copies=nil
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+ if mapdata then
+ mapdata.map={}
+ mapdata.backmap={}
+ end
+end
+actions["add duplicates"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ for unicode,d in next,duplicates do
+ local nofduplicates=#d
+ if nofduplicates>4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ for i=1,nofduplicates do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local n=0
+ for _,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
+ end
+ end
+ end
+ if u>0 then
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+ end
+ end
+end
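+-- "analyze glyphs": record italic corrections and mark classes and, when one width
+-- clearly dominates (as in cjk fonts), promote it to the shared default width.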
+actions["analyze glyphs"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local hasitalics=false
+ local widths={}
+ local marks={}
+ for unicode,description in next,descriptions do
+ local glyph=description.glyph
+ local italic=glyph.italic_correction
+ if not italic then
+ elseif italic==0 then
+ else
+ description.italic=italic
+ hasitalics=true
+ end
+ local width=glyph.width
+ widths[width]=(widths[width] or 0)+1
+ local class=glyph.class
+ if class then
+ if class=="mark" then
+ marks[unicode]=true
+ end
+ description.class=class
+ end
+ end
+ properties.hasitalics=hasitalics
+ resources.marks=marks
+ local wd,most=0,1
+ for k,v in next,widths do
+ if v>most then
+ wd,most=k,v
+ end
+ end
+ if most>1000 then
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode,description in next,descriptions do
+ if description.width==wd then
+ else
+ description.width=description.glyph.width
+ end
+ end
+ resources.defaultwidth=wd
+ else
+ for unicode,description in next,descriptions do
+ description.width=description.glyph.width
+ end
+ end
+end
+actions["reorganize mark classes"]=function(data,filename,raw)
+ local mark_classes=raw.mark_classes
+ if mark_classes then
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local markclasses={}
+ resources.markclasses=markclasses
+ for name,class in next,mark_classes do
+ local t={}
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]]=true
+ end
+ markclasses[name]=t
+ end
+ end
+end
+actions["reorganize features"]=function(data,filename,raw)
+ local features={}
+ data.resources.features=features
+ for k,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ local f={}
+ features[what]=f
+ for i=1,#dw do
+ local d=dw[i]
+ local dfeatures=d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df=dfeatures[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["reorganize anchor classes"]=function(data,filename,raw)
+ local resources=data.resources
+ local anchor_to_lookup={}
+ local lookup_to_anchor={}
+ resources.anchor_to_lookup=anchor_to_lookup
+ resources.lookup_to_anchor=lookup_to_anchor
+ local classes=raw.anchor_classes
+ if classes then
+ for c=1,#classes do
+ local class=classes[c]
+ local anchor=class.name
+ local lookups=class.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ local a=anchor_to_lookup[anchor]
+ if not a then
+ a={}
+ anchor_to_lookup[anchor]=a
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ local l=lookup_to_anchor[lookup]
+ if l then
+ l[anchor]=true
+ else
+ l={ [anchor]=true }
+ lookup_to_anchor[lookup]=l
+ end
+ a[lookup]=true
+ end
+ end
+ end
+end
+actions["prepare tounicode"]=function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
+end
+local g_directions={
+ gsub_contextchain=1,
+ gpos_contextchain=1,
+ gsub_reversecontextchain=-1,
+ gpos_reversecontextchain=-1,
+}
+actions["reorganize subtables"]=function(data,filename,raw)
+ local resources=data.resources
+ local sequences={}
+ local lookups={}
+ local chainedfeatures={}
+ resources.sequences=sequences
+ resources.lookups=lookups
+ for _,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk=dw[k]
+ local features=gk.features
+ local typ=gk.type
+ local chain=g_directions[typ] or 0
+ local subtables=gk.subtables
+ if subtables then
+ local t={}
+ for s=1,#subtables do
+ t[s]=subtables[s].name
+ end
+ subtables=t
+ end
+ local flags,markclass=gk.flags,nil
+ if flags then
+ local t={
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass=flags.mark_class
+ if markclass then
+ markclass=resources.markclasses[markclass]
+ end
+ flags=t
+ end
+ local name=gk.name
+ if not name then
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ local f={}
+ local o={}
+ for i=1,#features do
+ local df=features[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ o[#o+1]=tag
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ sequences[#sequences+1]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ name=name,
+ subtables=subtables,
+ markclass=markclass,
+ features=f,
+ order=o,
+ }
+ else
+ lookups[name]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ subtables=subtables,
+ markclass=markclass,
+ }
+ end
+ end
+ end
+ end
+end
+actions["prepare lookups"]=function(data,filename,raw)
+ local lookups=raw.lookups
+ if lookups then
+ data.lookups=lookups
+ end
+end
+local function t_uncover(splitter,cache,covers)
+ local result={}
+ for n=1,#covers do
+ local cover=covers[n]
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ result[n]=uncovered
+ end
+ return result
+end
+local function s_uncover(splitter,cache,cover)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ return { uncovered }
+ end
+end
+local function t_hashed(t,cache)
+ if t then
+ local ht={}
+ for i=1,#t do
+ local ti=t[i]
+ local tih=cache[ti]
+ if not tih then
+ local tn=#ti
+ if tn==1 then
+ tih={ [ti[1]]=true }
+ else
+ tih={}
+ for i=1,tn do
+ tih[ti[i]]=true
+ end
+ end
+ cache[ti]=tih
+ end
+ ht[i]=tih
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function s_hashed(t,cache)
+ if t then
+ local tf=t[1]
+ local nf=#tf
+ if nf==1 then
+ return { [tf[1]]=true }
+ else
+ local ht={}
+ for i=1,nf do
+ ht[i]={ [tf[i]]=true }
+ end
+ return ht
+ end
+ else
+ return nil
+ end
+end
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cover[1]
+ local replaced=cache[replacements]
+ if not replaced then
+ replaced=lpegmatch(splitter,replacements)
+ cache[replacements]=replaced
+ end
+ local nu,nr=#uncovered,#replaced
+ local r={}
+ if nu==nr then
+ for i=1,nu do
+ r[uncovered[i]]=replaced[i]
+ end
+ end
+ return r
+ end
+end
+actions["reorganize lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local splitter=data.helpers.tounicodetable
+ local t_u_cache={}
+ local s_u_cache=t_u_cache
+ local t_h_cache={}
+ local s_h_cache=t_h_cache
+ local r_u_cache={}
+ for _,lookup in next,data.lookups do
+ local rules=lookup.rules
+ if rules then
+ local format=lookup.format
+ if format=="class" then
+ local before_class=lookup.before_class
+ if before_class then
+ before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class=lookup.current_class
+ if current_class then
+ current_class=t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class=lookup.after_class
+ if after_class then
+ after_class=t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule=rules[i]
+ local class=rule.class
+ local before=class.before
+ if before then
+ for i=1,#before do
+ before[i]=before_class[before[i]] or {}
+ end
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=class.current
+ local lookups=rule.lookups
+ if current then
+ for i=1,#current do
+ current[i]=current_class[current[i]] or {}
+ if lookups and not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=class.after
+ if after then
+ for i=1,#after do
+ after[i]=after_class[after[i]] or {}
+ end
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.class=nil
+ end
+ lookup.before_class=nil
+ lookup.current_class=nil
+ lookup.after_class=nil
+ lookup.format="coverage"
+ elseif format=="coverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local coverage=rule.coverage
+ if coverage then
+ local before=coverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=coverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#current do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=coverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.coverage=nil
+ end
+ end
+ elseif format=="reversecoverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local reversecoverage=rule.reversecoverage
+ if reversecoverage then
+ local before=reversecoverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=reversecoverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=reversecoverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ local replacements=reversecoverage.replacements
+ if replacements then
+ rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage=nil
+ end
+ end
+ elseif format=="glyphs" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local glyphs=rule.glyphs
+ if glyphs then
+ local fore=glyphs.fore
+ if fore and fore~="" then
+ fore=s_uncover(splitter,s_u_cache,fore)
+ rule.after=s_hashed(fore,s_h_cache)
+ end
+ local back=glyphs.back
+ if back then
+ back=s_uncover(splitter,s_u_cache,back)
+ rule.before=s_hashed(back,s_h_cache)
+ end
+ local names=glyphs.names
+ if names then
+ names=s_uncover(splitter,s_u_cache,names)
+ rule.current=s_hashed(names,s_h_cache)
+ end
+ rule.glyphs=nil
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#names do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+local function check_variants(unicode,the_variants,splitter,unicodes)
+ local variants=the_variants.variants
+ if variants then
+ local glyphs=lpegmatch(splitter,variants)
+ local done={ [unicode]=true }
+ local n=0
+ for i=1,#glyphs do
+ local g=glyphs[i]
+ if done[g] then
+ if i>1 then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ end
+ else
+ if n==0 then
+ n=1
+ variants={ g }
+ else
+ n=n+1
+ variants[n]=g
+ end
+ done[g]=true
+ end
+ end
+ if n==0 then
+ variants=nil
+ end
+ end
+ local parts=the_variants.parts
+ if parts then
+ local p=#parts
+ if p>0 then
+ for i=1,p do
+ local pi=parts[i]
+ pi.glyph=unicodes[pi.component] or 0
+ pi.component=nil
+ end
+ else
+ parts=nil
+ end
+ end
+ local italic_correction=the_variants.italic_correction
+ if italic_correction and italic_correction==0 then
+ italic_correction=nil
+ end
+ return variants,parts,italic_correction
+end
+actions["analyze math"]=function(data,filename,raw)
+ if raw.math then
+ data.metadata.math=raw.math
+ local unicodes=data.resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for unicode,description in next,data.descriptions do
+ local glyph=description.glyph
+ local mathkerns=glyph.mathkern
+ local horiz_variants=glyph.horiz_variants
+ local vert_variants=glyph.vert_variants
+ local top_accent=glyph.top_accent
+ if mathkerns or horiz_variants or vert_variants or top_accent then
+ local math={}
+ if top_accent then
+ math.top_accent=top_accent
+ end
+ if mathkerns then
+ for k,v in next,mathkerns do
+ if not next(v) then
+ mathkerns[k]=nil
+ else
+ for k,v in next,v do
+ if v==0 then
+ k[v]=nil
+ end
+ end
+ end
+ end
+ math.kerns=mathkerns
+ end
+ if horiz_variants then
+ math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes)
+ end
+ if vert_variants then
+ math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes)
+ end
+ local italic_correction=description.italic
+ if italic_correction and italic_correction~=0 then
+ math.italic_correction=italic_correction
+ end
+ description.math=math
+ end
+ end
+ end
+end
+actions["reorganize glyph kerns"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ for unicode,description in next,descriptions do
+ local kerns=description.glyph.kerns
+ if kerns then
+ local newkerns={}
+ for k,kern in next,kerns do
+ local name=kern.char
+ local offset=kern.off
+ local lookup=kern.lookup
+ if name and offset and lookup then
+ local unicode=unicodes[name]
+ if unicode then
+ if type(lookup)=="table" then
+ for l=1,#lookup do
+ local lookup=lookup[l]
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ else
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns=newkerns
+ end
+ end
+end
+actions["merge kern classes"]=function(data,filename,raw)
+ local gposlist=raw.gpos
+ if gposlist then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ local ignored=0
+ local blocked=0
+ for gp=1,#gposlist do
+ local gpos=gposlist[gp]
+ local subtables=gpos.subtables
+ if subtables then
+ local first_done={}
+ local split={}
+ for s=1,#subtables do
+ local subtable=subtables[s]
+ local kernclass=subtable.kernclass
+ local lookup=subtable.lookup or subtable.name
+ if kernclass then
+ if #kernclass>0 then
+ kernclass=kernclass[1]
+ lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts=kernclass.firsts
+ local seconds=kernclass.seconds
+ local offsets=kernclass.offsets
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
+ end
+ end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked=blocked+1
+ else
+ first_done[first_unicode]=true
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ if overloadkerns then
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
+ end
+ else
+ for second_unicode,kern in next,extrakerns do
+ local k=lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode]=kern
+ elseif k~=kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored=ignored+1
+ end
+ end
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass={}
+ end
+ end
+ end
+ end
+ if ignored>0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked>0 then
+ report_otf("%s succesive kerns blocked",blocked)
+ end
+ end
+end
+actions["check glyphs"]=function(data,filename,raw)
+ for unicode,description in next,data.descriptions do
+ description.glyph=nil
+ end
+end
+local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1)
+local function valid_ps_name(str)
+ return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false
+end
+actions["check metadata"]=function(data,filename,raw)
+ local metadata=data.metadata
+ for _,k in next,mainfields do
+ if valid_fields[k] then
+ local v=raw[k]
+ if not metadata[k] then
+ metadata[k]=v
+ end
+ end
+ end
+ local ttftables=metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data="deleted"
+ end
+ end
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ local function valid(what)
+ local names=raw.names
+ for i=1,#names do
+ local list=names[i]
+ local names=list.names
+ if names then
+ local name=names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname=metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname=valid(what)
+ if not newname then
+ newname=formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1]=warning
+ report_otf(warning)
+ metadata[what]=newname
+ end
+ end
+ check("fontname")
+ check("fullname")
+ end
+end
+actions["cleanup tables"]=function(data,filename,raw)
+ local duplicates=data.resources.duplicates
+ if duplicates then
+ for k,v in next,duplicates do
+ if #v==1 then
+ duplicates[k]=v[1]
+ end
+ end
+ end
+ data.resources.indices=nil
+ data.resources.unicodes=nil
+ data.helpers=nil
+end
+actions["reorganize glyph lookups"]=function(data,filename,raw)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local descriptions=data.descriptions
+ local splitter=data.helpers.tounicodelist
+ local lookuptypes=resources.lookuptypes
+ for unicode,description in next,descriptions do
+ local lookups=description.glyph.lookups
+ if lookups then
+ for tag,lookuplist in next,lookups do
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local specification=lookup.specification
+ local lookuptype=lookup.type
+ local lt=lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag]=lookuptype
+ elseif lt~=lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype=="ligature" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="alternate" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="substitution" then
+ lookuplist[l]=unicodes[specification.variant]
+ elseif lookuptype=="multiple" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="position" then
+ lookuplist[l]={
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype=="pair" then
+ local one=specification.offsets[1]
+ local two=specification.offsets[2]
+ local paired=unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
+ else
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
+ else
+ lookuplist[l]={ paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups,mlookups
+ for tag,lookuplist in next,lookups do
+ if #lookuplist==1 then
+ if slookups then
+ slookups[tag]=lookuplist[1]
+ else
+ slookups={ [tag]=lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag]=lookuplist
+ else
+ mlookups={ [tag]=lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups=slookups
+ end
+ if mlookups then
+ description.mlookups=mlookups
+ end
+ end
+ end
+end
+local zero={ 0,0 }
+actions["reorganize glyph anchors"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ for unicode,description in next,descriptions do
+ local anchors=description.glyph.anchors
+ if anchors then
+ for class,data in next,anchors do
+ if class=="baselig" then
+ for tag,specification in next,data do
+ local n=0
+ for k,v in next,specification do
+ if k>n then
+ n=k
+ end
+ local x,y=v.x,v.y
+ if x or y then
+ specification[k]={ x or 0,y or 0 }
+ else
+ specification[k]=zero
+ end
+ end
+ local t={}
+ for i=1,n do
+ t[i]=specification[i] or zero
+ end
+ data[tag]=t
+ end
+ else
+ for tag,specification in next,data do
+ local x,y=specification.x,specification.y
+ if x or y then
+ data[tag]={ x or 0,y or 0 }
+ else
+ data[tag]=zero
+ end
+ end
+ end
+ end
+ description.anchors=anchors
+ end
+ end
+end
+local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1
+local uselessname=(1-bogusname)^0*bogusname
+actions["purge names"]=function(data,filename,raw)
+ if purge_names then
+ local n=0
+ for u,d in next,data.descriptions do
+ if lpegmatch(uselessname,d.name) then
+ n=n+1
+ d.name=nil
+ end
+ end
+ if n>0 then
+ report_otf("%s bogus names removed",n)
+ end
+ end
+end
+actions["compact lookups"]=function(data,filename,raw)
+ if not compact_lookups then
+ report_otf("not compacting")
+ return
+ end
+ local last=0
+ local tags=table.setmetatableindex({},
+ function(t,k)
+ last=last+1
+ t[k]=last
+ return last
+ end
+ )
+ local descriptions=data.descriptions
+ local resources=data.resources
+ for u,d in next,descriptions do
+ local slookups=d.slookups
+ if type(slookups)=="table" then
+ local s={}
+ for k,v in next,slookups do
+ s[tags[k]]=v
+ end
+ d.slookups=s
+ end
+ local mlookups=d.mlookups
+ if type(mlookups)=="table" then
+ local m={}
+ for k,v in next,mlookups do
+ m[tags[k]]=v
+ end
+ d.mlookups=m
+ end
+ local kerns=d.kerns
+ if type(kerns)=="table" then
+ local t={}
+ for k,v in next,kerns do
+ t[tags[k]]=v
+ end
+ d.kerns=t
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local rules=v.rules
+ if rules then
+ for i=1,#rules do
+ local l=rules[i].lookups
+ if type(l)=="table" then
+ for i=1,#l do
+ l[i]=tags[l[i]]
+ end
+ end
+ end
+ end
+ l[tags[k]]=v
+ end
+ data.lookups=l
+ end
+ local lookups=resources.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local s=v.subtables
+ if type(s)=="table" then
+ for i=1,#s do
+ s[i]=tags[s[i]]
+ end
+ end
+ l[tags[k]]=v
+ end
+ resources.lookups=l
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for i=1,#sequences do
+ local s=sequences[i]
+ local n=s.name
+ if n then
+ s.name=tags[n]
+ end
+ local t=s.subtables
+ if type(t)=="table" then
+ for i=1,#t do
+ t[i]=tags[t[i]]
+ end
+ end
+ end
+ end
+ local lookuptypes=resources.lookuptypes
+ if lookuptypes then
+ local l={}
+ for k,v in next,lookuptypes do
+ l[tags[k]]=v
+ end
+ resources.lookuptypes=l
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookups in next,anchor_to_lookup do
+ local l={}
+ for lookup,value in next,lookups do
+ l[tags[lookup]]=value
+ end
+ anchor_to_lookup[anchor]=l
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ local l={}
+ for lookup,value in next,lookup_to_anchor do
+ l[tags[lookup]]=value
+ end
+ resources.lookup_to_anchor=l
+ end
+ tags=table.swapped(tags)
+ report_otf("%s lookup tags compacted",#tags)
+ resources.lookuptags=tags
+end
+function otf.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return {}
+ end
+end
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata=data.metadata
+ local warnings=data.warnings
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local mathparameters={}
+ local pfminfo=metadata.pfminfo or {}
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local spaceunits=500
+ local spacer="space"
+ local designsize=metadata.designsize or metadata.design_size or 100
+ local mathspecs=metadata.math
+ if designsize==0 then
+ designsize=100
+ end
+ if mathspecs then
+ for name,value in next,mathspecs do
+ mathparameters[name]=value
+ end
+ end
+ for unicode,_ in next,data.descriptions do
+ characters[unicode]={}
+ end
+ if mathspecs then
+ for unicode,character in next,characters do
+ local d=descriptions[unicode]
+ local m=d.math
+ if m then
+ local variants=m.horiz_variants
+ local parts=m.horiz_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.horiz_variants=parts
+ elseif parts then
+ character.horiz_variants=parts
+ end
+ local variants=m.vert_variants
+ local parts=m.vert_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.vert_variants=parts
+ elseif parts then
+ character.vert_variants=parts
+ end
+ local italic_correction=m.vert_italic_correction
+ if italic_correction then
+ character.vert_italic_correction=italic_correction
+ end
+ local top_accent=m.top_accent
+ if top_accent then
+ character.top_accent=top_accent
+ end
+ local kerns=m.kerns
+ if kerns then
+ character.mathkerns=kerns
+ end
+ end
+ end
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local psname=fontname or fullname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ report_otf("changing %a units to %a",0,units)
+ end
+ local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local charwidth=pfminfo.avgwidth
+ local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ local italicangle=metadata.italicangle
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ local space=0x0020
+ local emdash=0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width/2,"emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits) or 500
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=units/2
+ parameters.space_shrink=1*units/3
+ parameters.x_height=2*units/5
+ parameters.quad=units
+ if spaceunits<2*units/5 then
+ end
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x0078
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ parameters.designsize=(designsize/10)*65536
+ parameters.ascender=abs(metadata.ascent or 0)
+ parameters.descender=abs(metadata.descent or 0)
+ parameters.units=units
+ properties.space=spacer
+ properties.encodingbytes=2
+ properties.format=data.format or otf_format(filename) or formats.otf
+ properties.noglyphnames=true
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=psname
+ properties.name=filename or fullname
+ if warnings and #warnings>0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ mathparameters=mathparameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ warnings=warnings,
+ }
+ end
+end
+local function otftotfm(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name=specification.name
+ local sub=specification.sub
+ local filename=specification.filename
+ local features=specification.features.normal
+ local rawdata=otf.load(filename,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ local descriptions=rawdata.descriptions
+ local duplicates=rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates,nofduplicated=0,0
+ for parent,list in next,duplicates do
+ if type(list)=="table" then
+ local n=#list
+ for i=1,n do
+ local unicode=list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ end
+ nofduplicates=nofduplicates+n
+ else
+ if not descriptions[list] then
+ descriptions[list]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ nofduplicates=nofduplicates+1
+ end
+ end
+ if trace_otf and nofduplicated~=nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
+ rawdata.lookuphash={}
+ tfmdata=copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ local features=constructors.checkedfeatures("otf",features)
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.dynamics={}
+ tfmdata.changed={}
+ shared.features=features
+ shared.processes=otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+local function read_from_otf(specification)
+ local tfmdata=otftotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata.properties.sub=specification.sub
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata=tfmdata.shared.rawdata.metadata.math
+ local mathsize=tonumber(mathsize)
+ if mathdata then
+ local parameters=tfmdata.parameters
+ parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize=mathsize
+ end
+end
+registerotffeature {
+ name="mathsize",
+ description="apply mathsize specified in the font",
+ initializers={
+ base=checkmathsize,
+ node=checkmathsize,
+ }
+}
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local featuremap,featurelist={},{}
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ features=features and features[kind]
+ features=features and (features[script] or features[default] or features[wildcard])
+ features=features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables=sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss=subtables[s]
+ if not featuremap[ss] then
+ featuremap[ss]=true
+ featurelist[#featurelist+1]=ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist>0 then
+ return featuremap,featurelist
+ end
+ end
+ return nil,nil
+end
+local function check_otf(forced,specification,suffix)
+ local name=specification.name
+ if forced then
+ name=specification.forcedname
+ end
+ local fullname=findbinfile(name,suffix) or ""
+ if fullname=="" then
+ fullname=fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname~="" and not fonts.names.ignoredfile(fullname) then
+ specification.filename=fullname
+ return read_from_otf(specification)
+ end
+end
+local function opentypereader(specification,suffix)
+ local forced=specification.forced or ""
+ if formats[forced] then
+ return check_otf(true,specification,forced)
+ else
+ return check_otf(false,specification,suffix)
+ end
+end
+readers.opentype=opentypereader
+function readers.otf (specification) return opentypereader(specification,"otf") end
+function readers.ttf (specification) return opentypereader(specification,"ttf") end
+function readers.ttc (specification) return opentypereader(specification,"ttf") end
+function readers.dfont(specification) return opentypereader(specification,"ttf") end
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties=tfmdata.properties
+ return properties.script or "dflt",properties.language or "dflt"
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otb']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget
+local lpegmatch=lpeg.match
+local utfchar=utf.char
+local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
+local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
+local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
+local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
+local report_prepare=logs.reporter("fonts","otf prepare")
+local fonts=fonts
+local otf=fonts.handlers.otf
+local otffeatures=otf.features
+local registerotffeature=otffeatures.register
+otf.defaultbasealternate="none"
+local wildcard="*"
+local default="dflt"
+local formatters=string.formatters
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(descriptions,n)
+ if type(n)=="number" then
+ local name=descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam,j={},{},0
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ j=j+1
+ local di=descriptions[ni]
+ num[j]=f_unicode(ni)
+ nam[j]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
+end
+local function cref(feature,lookuptags,lookupname)
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
+ else
+ return formatters["feature %a"](feature)
+ end
+end
+local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+local basemethods={}
+local basemethod="<unset>"
+local function applybasemethod(what,...)
+ local m=basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
+end
+local basehash,basehashes,applied={},1,{}
+local function registerbasehash(tfmdata)
+ local properties=tfmdata.properties
+ local hash=concat(applied," ")
+ local base=basehash[hash]
+ if not base then
+ basehashes=basehashes+1
+ base=basehashes
+ basehash[hash]=base
+ end
+ properties.basehash=base
+ properties.fullname=properties.fullname.."-"..base
+ applied={}
+end
+local function registerbasefeature(feature,value)
+ applied[#applied+1]=feature.."="..tostring(value)
+end
+local trace=false
+local function finalize_ligatures(tfmdata,ligatures)
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local private=resources.private
+ local alldone=false
+ while not alldone do
+ local done=0
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ if ligature then
+ local unicode,lookupdata=ligature[1],ligature[2]
+ if trace_ligatures_detail then
+ report_prepare("building % a into %a",lookupdata,unicode)
+ end
+ local size=#lookupdata
+ local firstcode=lookupdata[1]
+ local firstdata=characters[firstcode]
+ local okay=false
+ if firstdata then
+ local firstname="ctx_"..firstcode
+ for i=1,size-1 do
+ local firstdata=characters[firstcode]
+ if not firstdata then
+ firstcode=private
+ if trace_ligatures_detail then
+ report_prepare("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname]=firstcode
+ firstdata={ intermediate=true,ligatures={} }
+ characters[firstcode]=firstdata
+ descriptions[firstcode]={ name=firstname }
+ private=private+1
+ end
+ local target
+ local secondcode=lookupdata[i+1]
+ local secondname=firstname.."_"..secondcode
+ if i==size-1 then
+ target=unicode
+ if not rawget(unicodes,secondname) then
+ unicodes[secondname]=unicode
+ end
+ okay=true
+ else
+ target=rawget(unicodes,secondname)
+ if not target then
+ break
+ end
+ end
+ if trace_ligatures_detail then
+ report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs=firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode]={ char=target }
+ else
+ firstdata.ligatures={ [secondcode]={ char=target } }
+ end
+ firstcode=target
+ firstname=secondname
+ end
+ elseif trace_ligatures_detail then
+ report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
+ end
+ if okay then
+ ligatures[i]=false
+ done=done+1
+ end
+ end
+ end
+ alldone=done==0
+ end
+ if trace_ligatures_detail then
+ for k,v in table.sortedhash(characters) do
+ if v.ligatures then
+ table.print(v,k)
+ end
+ end
+ end
+ resources.private=private
+ return true
+ end
+end
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local properties=tfmdata.properties
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
+ local ligatures={}
+ local alternate=tonumber(value) or true and 1
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ local actions={
+ substitution=function(lookupdata,lookuptags,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode]=lookupdata
+ end,
+ alternate=function(lookupdata,lookuptags,lookupname,description,unicode)
+ local replacement=lookupdata[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=lookupdata[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=lookupdata[#lookupdata]
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature=function(lookupdata,lookuptags,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1]={ unicode,lookupdata }
+ end,
+ }
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local lookups=description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookups[lookupname]
+ if lookupdata then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ action(lookupdata,lookuptags,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookuplist=lookups[lookupname]
+ if lookuplist then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookuptags,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ properties.hasligatures=finalize_ligatures(tfmdata,ligatures)
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local properties=tfmdata.properties
+ local lookuptags=resources.lookuptags
+ local sharedkerns={}
+ local traceindeed=trace_baseinit and trace_kerns
+ local haskerns=false
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local rawkerns=description.kerns
+ if rawkerns then
+ local s=sharedkerns[rawkerns]
+ if s==false then
+ elseif s then
+ character.kerns=s
+ else
+ local newkerns=character.kerns
+ local done=false
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local kerns=rawkerns[lookup]
+ if kerns then
+ for otherunicode,value in next,kerns do
+ if value==0 then
+ elseif not newkerns then
+ newkerns={ [otherunicode]=value }
+ done=true
+ if traceindeed then
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then
+ newkerns[otherunicode]=value
+ done=true
+ if traceindeed then
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns]=newkerns
+ character.kerns=newkerns
+ haskerns=true
+ else
+ sharedkerns[rawkerns]=false
+ end
+ end
+ end
+ end
+ properties.haskerns=haskerns
+end
+basemethods.independent={
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+local function makefake(tfmdata,name,present)
+ local resources=tfmdata.resources
+ local private=resources.private
+ local character={ intermediate=true,ligatures={} }
+ resources.unicodes[name]=private
+ tfmdata.characters[private]=character
+ tfmdata.descriptions[private]={ name=name }
+ resources.private=private+1
+ present[name]=private
+ return character
+end
+local function make_1(present,tree,name)
+ for k,v in next,tree do
+ if k=="ligature" then
+ present[name]=v
+ else
+ make_1(present,v,name.."_"..k)
+ end
+ end
+end
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
+ for k,v in next,tree do
+ if k=="ligature" then
+ local character=characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
+ end
+ character=makefake(tfmdata,name,present)
+ end
+ local ligatures=character.ligatures
+ if ligatures then
+ ligatures[unicode]={ char=v }
+ else
+ character.ligatures={ [unicode]={ char=v } }
+ end
+ if done then
+ local d=done[lookupname]
+ if not d then
+ done[lookupname]={ "dummy",v }
+ else
+ d[#d+1]=v
+ end
+ end
+ else
+ local code=present[name] or unicode
+ local name=name.."_"..k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
+ end
+ end
+end
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
+ local ligatures={}
+ local alternate=tonumber(value) or true and 1
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ local lookuptype=lookuptypes[lookupname]
+ for unicode,data in next,lookupdata do
+ if lookuptype=="substitution" then
+ if trace_singles then
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode]=data
+ elseif lookuptype=="alternate" then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype=="ligature" then
+ ligatures[#ligatures+1]={ unicode,data,lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local present={}
+ local done=trace_baseinit and trace_ligatures and {}
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree=ligature[1],ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
+ end
+ end
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local properties=tfmdata.properties
+ local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
+ local traceindeed=trace_baseinit and trace_kerns
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ for unicode,data in next,lookupdata do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
+ end
+ end
+end
+local function initializehashes(tfmdata)
+ nodeinitializers.features(tfmdata)
+end
+basemethods.shared={
+ initializehashes=initializehashes,
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+basemethod="independent"
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local starttime=trace_preparing and os.clock()
+ local features=tfmdata.shared.features
+ local fullname=tfmdata.properties.fullname or "?"
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups=otf.collectlookups
+ local rawdata=tfmdata.shared.rawdata
+ local properties=tfmdata.properties
+ local script=properties.script
+ local language=properties.language
+ local basesubstitutions=rawdata.resources.features.gsub
+ local basepositionings=rawdata.resources.features.gpos
+ if basesubstitutions or basepositionings then
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local sfeatures=sequence.features
+ if sfeatures then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local feature=order[i]
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ base=featuresinitializer,
+ }
+}
+directives.register("fonts.otf.loader.basemethod",function(v)
+ if basemethods[v] then
+ basemethod=v
+ end
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-inj']={
+ version=1.001,
+ comment="companion to font-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+if not nodes.properties then return end
+local next,rawget=next,rawget
+local utfchar=utf.char
+local fastcopy=table.fastcopy
+local trace_injections=false trackers.register("fonts.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("fonts","injections")
+local attributes,nodes,node=attributes,nodes,node
+fonts=fonts
+local fontdata=fonts.hashes.identifiers
+nodes.injections=nodes.injections or {}
+local injections=nodes.injections
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
+local kern_code=nodecodes.kern
+local nuts=nodes.nuts
+local nodepool=nuts.pool
+local newkern=nodepool.kern
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local setfield=nuts.setfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local traverse_id=nuts.traverse_id
+local insert_node_before=nuts.insert_before
+local insert_node_after=nuts.insert_after
+local find_tail=nuts.tail
+local properties=nodes.properties.data
+function injections.installnewkern(nk)
+ newkern=nk or newkern
+end
+local nofregisteredkerns=0
+local nofregisteredpairs=0
+local nofregisteredmarks=0
+local nofregisteredcursives=0
+local keepregisteredcounts=false
+function injections.keepcounts()
+ keepregisteredcounts=true
+end
+function injections.resetcounts()
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ keepregisteredcounts=false
+end
+function injections.reset(n)
+ local p=rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections=nil
+ end
+end
+function injections.copy(target,source)
+ local sp=rawget(properties,source)
+ if sp then
+ local tp=rawget(properties,target)
+ local si=rawget(sp,"injections")
+ if si then
+ si=fastcopy(si)
+ if tp then
+ tp.injections=si
+ else
+ properties[target]={
+ injections=si,
+ }
+ end
+ else
+ if tp then
+ tp.injections=nil
+ end
+ end
+ end
+end
+function injections.setligaindex(n,index)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.ligaindex=index
+ else
+ p.injections={
+ ligaindex=index
+ }
+ end
+ else
+ properties[n]={
+ injections={
+ ligaindex=index
+ }
+ }
+ end
+end
+function injections.getligaindex(n,default)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx=factor*(exit[1]-entry[1])
+ local dy=-factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ nofregisteredcursives=nofregisteredcursives+1
+ if rlmode<0 then
+ dx=-(dx+wn)
+ else
+ dx=dx-ws
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursiveanchor=true
+ else
+ p.injections={
+ cursiveanchor=true,
+ }
+ end
+ else
+ properties[start]={
+ injections={
+ cursiveanchor=true,
+ },
+ }
+ end
+ local p=rawget(properties,nxt)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursivex=dx
+ i.cursivey=dy
+ else
+ p.injections={
+ cursivex=dx,
+ cursivey=dy,
+ }
+ end
+ else
+ properties[nxt]={
+ injections={
+ cursivex=dx,
+ cursivey=dy,
+ },
+ }
+ end
+ return dx,dy,nofregisteredcursives
+end
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection)
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local yoffset=y-h
+ local leftkern=x
+ local rightkern=w-x
+ if leftkern~=0 or rightkern~=0 or yoffset~=0 then
+ nofregisteredpairs=nofregisteredpairs+1
+ if rlmode and rlmode<0 then
+ leftkern,rightkern=rightkern,leftkern
+ end
+ local p=rawget(properties,current)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ if leftkern~=0 then
+ i.leftkern=(i.leftkern or 0)+leftkern
+ end
+ if rightkern~=0 then
+ i.rightkern=(i.rightkern or 0)+rightkern
+ end
+ if yoffset~=0 then
+ i.yoffset=(i.yoffset or 0)+yoffset
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ p.injections={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ }
+ else
+ p.injections={
+ yoffset=yoffset,
+ }
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ properties[current]={
+ injections={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ },
+ }
+ else
+ properties[current]={
+ injections={
+ yoffset=yoffset,
+ },
+ }
+ end
+ return x,y,w,h,nofregisteredpairs
+ end
+ end
+ return x,y,w,h
+end
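+-- setkern registers a plain kern (scaled by the font factor) as a left kern in
+-- the node's injection properties; the optional injection argument selects the
+-- property slot, the regular "injections" table being the default.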
+function injections.setkern(current,factor,rlmode,x,injection)
+ local dx=factor*x
+ if dx~=0 then
+ nofregisteredkerns=nofregisteredkerns+1
+ local p=rawget(properties,current)
+ if not injection then
+ injection="injections"
+ end
+ if p then
+ local i=rawget(p,injection)
+ if i then
+ i.leftkern=dx+(i.leftkern or 0)
+ else
+ p[injection]={
+ leftkern=dx,
+ }
+ end
+ else
+ properties[current]={
+ [injection]={
+ leftkern=dx,
+ },
+ }
+ end
+ return dx,nofregisteredkerns
+ else
+ return 0,0
+ end
+end
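+-- setmark stores the anchor based offsets of a mark relative to its base glyph
+-- together with the base node itself (markbasenode), so that inject_marks can
+-- later turn them into real xoffset/yoffset values.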
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ nofregisteredmarks=nofregisteredmarks+1
+ if rlmode>=0 then
+ dx=tfmbase.width-dx
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.markx=dx
+ i.marky=dy
+ i.markdir=rlmode or 0
+ i.markbase=nofregisteredmarks
+ i.markbasenode=base
+ else
+ p.injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ }
+ end
+ else
+ properties[start]={
+ injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ },
+ }
+ end
+ return dx,dy,nofregisteredmarks
+end
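+-- the functions below are tracing helpers only: show, showsub and trace report
+-- the registered injections per node, show_result reports glyph offsets and
+-- kern values after injection.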
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+local function showchar(n,nested)
+ local char=getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+local function show(n,what,nested,symbol)
+ if n then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,what)
+ if i then
+ local leftkern=i.leftkern or 0
+ local rightkern=i.rightkern or 0
+ local yoffset=i.yoffset or 0
+ local markx=i.markx or 0
+ local marky=i.marky or 0
+ local markdir=i.markdir or 0
+ local markbase=i.markbase or 0
+ local cursivex=i.cursivex or 0
+ local cursivey=i.cursivey or 0
+ local ligaindex=i.ligaindex or 0
+ local margin=nested and 4 or 2
+ if rightkern~=0 or yoffset~=0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern~=0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx~=0 or marky~=0 or markbase~=0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase~=0 and "yes" or "no")
+ end
+ if cursivex~=0 or cursivey~=0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex~=0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n=head
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id==disc_code then
+ local pre=getfield(n,"pre")
+ local post=getfield(n,"post")
+ local replace=getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
+ end
+ if post then
+ showsub(post,"postinjections","post")
+ end
+ if replace then
+ showsub(replace,"replaceinjections","replace")
+ end
+ end
+ n=getnext(n)
+ end
+ report_injections("end run")
+end
+local function show_result(head)
+ local current=head
+ local skipping=false
+ while current do
+ local id=getid(current)
+ if id==glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ skipping=false
+ elseif id==kern_code then
+ report_injections("kern: %p",getfield(current,"kern"))
+ skipping=false
+ elseif not skipping then
+ report_injections()
+ skipping=true
+ end
+ current=getnext(current)
+ end
+end
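+-- the two collect_glyphs variants split a node list into base glyphs and marks;
+-- the first one is used when pairs were registered and already applies pending
+-- y offsets, the second one only collects.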
+local function collect_glyphs_1(head)
+ local glyphs,nofglyphs={},0
+ local marks,nofmarks={},0
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n)<256 then
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=n
+ else
+ nofglyphs=nofglyphs+1
+ glyphs[nofglyphs]=n
+ end
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ return glyphs,nofglyphs,marks,nofmarks
+end
+local function collect_glyphs_2(head)
+ local glyphs,nofglyphs={},0
+ local marks,nofmarks={},0
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n)<256 then
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=n
+ else
+ nofglyphs=nofglyphs+1
+ glyphs[nofglyphs]=n
+ end
+ end
+ end
+ return glyphs,nofglyphs,marks,nofmarks
+end
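+-- inject_marks positions each collected mark relative to the base node stored
+-- in its injection record, taking registered kerns on the base into account and
+-- adding the mark's y offset to the offsets already present.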
+local function inject_marks(marks,nofmarks)
+ for i=1,nofmarks do
+ local n=marks[i]
+ local pn=rawget(properties,n)
+ if pn then
+ pn=rawget(pn,"injections")
+ if pn then
+ local p=pn.markbasenode
+ if p then
+ local px=getfield(p,"xoffset")
+ local ox=0
+ local rightkern=nil
+ local pp=rawget(properties,p)
+ if pp then
+ pp=rawget(pp,"injections")
+ if pp then
+ rightkern=pp.rightkern
+ end
+ end
+ if rightkern then
+ if pn.markdir<0 then
+ ox=px-pn.markx-rightkern
+ else
+ local leftkern=pp.leftkern
+       if leftkern then
+        ox=px-pn.markx-leftkern
+       else
+        ox=px-pn.markx
+       end
+ end
+ else
+ ox=px-pn.markx
+ local wn=getfield(n,"width")
+ if wn~=0 then
+ pn.leftkern=-wn/2
+ pn.rightkern=-wn/2
+ end
+ end
+ setfield(n,"xoffset",ox)
+ local py=getfield(p,"yoffset")
+ local oy=0
+ if marks[p] then
+ oy=py+pn.marky
+ else
+ oy=getfield(n,"yoffset")+py+pn.marky
+ end
+ setfield(n,"yoffset",oy)
+ else
+ end
+ end
+ end
+ end
+end
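+-- inject_cursives turns registered cursivex values into left kerns and, when a
+-- cursive chain ends, propagates the accumulated cursivey deltas back over the
+-- chain as y offsets.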
+local function inject_cursives(glyphs,nofglyphs)
+ local cursiveanchor,lastanchor=nil,nil
+ local minc,maxc,last=0,0,nil
+ for i=1,nofglyphs do
+ local n=glyphs[i]
+ local pn=rawget(properties,n)
+ if pn then
+ pn=rawget(pn,"injections")
+ end
+ if pn then
+ local cursivex=pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex~=0 then
+ pn.leftkern=(pn.leftkern or 0)+cursivex
+ end
+ if lastanchor then
+ if maxc==0 then
+ minc=lastanchor
+ end
+ maxc=lastanchor
+ properties[cursiveanchor].cursivedy=pn.cursivey
+ end
+ last=n
+ else
+ maxc=0
+ end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ maxc=0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor=n
+ lastanchor=i
+ else
+ cursiveanchor=nil
+ lastanchor=nil
+ if maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ maxc=0
+ end
+ end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
+ end
+ maxc=0
+ cursiveanchor=nil
+ lastanchor=nil
+ end
+ end
+ if last and maxc>0 then
+ local ny=getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ end
+end
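+-- inject_kerns inserts real kern nodes before (leftkern) and after (rightkern)
+-- the glyphs in the given list, based on the injection properties set earlier.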
+local function inject_kerns(head,list,length)
+ for i=1,length do
+ local n=list[i]
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ end
+ end
+ end
+ end
+end
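+-- inject_everything is the full pass used when marks or cursives are involved:
+-- collect glyphs and marks, apply cursives and marks, insert the kern nodes and
+-- finally reset the registration counters unless keepcounts was requested.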
+local function inject_everything(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs,nofglyphs,marks,nofmarks
+ if nofregisteredpairs>0 then
+ glyphs,nofglyphs,marks,nofmarks=collect_glyphs_1(head)
+ else
+ glyphs,nofglyphs,marks,nofmarks=collect_glyphs_2(head)
+ end
+ if nofglyphs>0 then
+ if nofregisteredcursives>0 then
+ inject_cursives(glyphs,nofglyphs)
+ end
+ if nofregisteredmarks>0 then
+ inject_marks(marks,nofmarks)
+ end
+ inject_kerns(head,glyphs,nofglyphs)
+ end
+ if nofmarks>0 then
+ inject_kerns(head,marks,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ end
+ return tonode(head),true
+end
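+-- inject_kerns_only is the cheap pass for runs that only registered simple
+-- kerns; discretionaries get their pre, post and replace sublists handled via
+-- the corresponding pre/post/replace injection slots.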
+local function inject_kerns_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ head=insert_node_before(head,n,newkern(leftkern))
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"preinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"post",h)
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"replace",h)
+ end
+ end
+ p=n
+ else
+ p=nil
+ end
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
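+-- inject_pairs_only additionally handles the y offsets and right kerns that
+-- result from pair positioning, again with special treatment of the sublists
+-- of discretionary nodes.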
+local function inject_pairs_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"preinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+        if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"postinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"post",h)
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"replaceinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"replace",h)
+ end
+ end
+ p=n
+ else
+ p=nil
+ end
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredpairs=0
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
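+-- the handler picks the cheapest pass that covers what was registered for the
+-- current run: everything, pairs only, kerns only, or nothing at all.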
+function injections.handler(head,where)
+ if nofregisteredmarks>0 or nofregisteredcursives>0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs>0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns>0 then
+ return inject_kerns_only(head,where)
+ else
+ return head,false
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otx']={
+ version=1.001,
+ comment="companion to font-otf.lua (analysing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type=type
+if not trackers then trackers={ register=function() end } end
+local fonts,nodes,node=fonts,nodes,node
+local allocate=utilities.storage.allocate
+local otf=fonts.handlers.otf
+local analyzers=fonts.analyzers
+local initializers=allocate()
+local methods=allocate()
+analyzers.initializers=initializers
+analyzers.methods=methods
+analyzers.useunicodemarks=false
+local a_state=attributes.private('state')
+local nuts=nodes.nuts
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getprop=nuts.getprop
+local setprop=nuts.setprop
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local traverse_id=nuts.traverse_id
+local traverse_node_list=nuts.traverse
+local end_of_math=nuts.end_of_math
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
+local math_code=nodecodes.math
+local fontdata=fonts.hashes.identifiers
+local categories=characters and characters.categories or {}
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local s_init=1 local s_rphf=7
+local s_medi=2 local s_half=8
+local s_fina=3 local s_pref=9
+local s_isol=4 local s_blwf=10
+local s_mark=5 local s_pstf=11
+local s_rest=6
+local states={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ mark=s_mark,
+ rest=s_rest,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+local features={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+analyzers.states=states
+analyzers.features=features
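+-- setstate is the generic analyzer (also used for latn): it tags the glyphs of
+-- the given font with init/medi/fina/isol/mark states based on the character
+-- descriptions, which the init/medi/fina/isol features use to select positional
+-- forms.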
+function analyzers.setstate(head,font)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local descriptions=tfmdata.descriptions
+ local first,last,current,n,done=nil,nil,head,0,false
+ current=tonut(current)
+ while current do
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font then
+ done=true
+ local char=getchar(current)
+ local d=descriptions[char]
+ if d then
+ if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
+ done=true
+ setprop(current,a_state,s_mark)
+ elseif n==0 then
+ first,last,n=current,current,1
+ setprop(current,a_state,s_init)
+ else
+ last,n=current,n+1
+ setprop(current,a_state,s_medi)
+ end
+ else
+ if first and first==last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first,last,n=nil,nil,0
+ end
+ elseif id==disc_code then
+ setprop(current,a_state,s_medi)
+ last=current
+ else
+ if first and first==last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first,last,n=nil,nil,0
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=getnext(current)
+ end
+ if first and first==last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ return head,done
+end
+local function analyzeinitializer(tfmdata,value)
+ local script,language=otf.scriptandlanguage(tfmdata)
+ local action=initializers[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(tfmdata,value)
+ else
+ local action=action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+local function analyzeprocessor(head,font,attr)
+ local tfmdata=fontdata[font]
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ local action=methods[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(head,font,attr)
+ else
+ action=action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head,false
+end
+registerotffeature {
+ name="analyze",
+ description="analysis of character classes",
+ default=true,
+ initializers={
+ node=analyzeinitializer,
+ },
+ processors={
+ position=1,
+ node=analyzeprocessor,
+ }
+}
+methods.latn=analyzers.setstate
+local tatweel=0x0640
+local zwnj=0x200C
+local zwj=0x200D
+local isolated={
+ [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
+ [0x0604]=true,
+ [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
+ [0x06DD]=true,
+ [0x0856]=true,[0x0858]=true,[0x0857]=true,
+ [0x07FA]=true,
+ [zwnj]=true,
+ [0x08AD]=true,
+}
+local final={
+ [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
+ [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
+ [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
+ [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
+ [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
+ [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
+ [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
+ [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
+ [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
+ [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
+ [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
+ [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
+ [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
+ [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
+ [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
+ [0x0778]=true,[0x0779]=true,
+ [0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
+ [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
+ [0x072F]=true,[0x074D]=true,
+ [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
+ [0x084F]=true,
+ [0x08AE]=true,[0x08B1]=true,[0x08B2]=true,
+}
+local medial={
+ [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
+ [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
+ [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
+ [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
+ [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
+ [0x0641]=true,[0x0642]=true,[0x0643]=true,
+ [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
+ [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
+ [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
+ [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
+ [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
+ [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
+ [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
+ [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
+ [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
+ [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
+ [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
+ [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
+ [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
+ [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
+ [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
+ [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
+ [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
+ [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
+ [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
+ [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
+ [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
+ [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
+ [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
+ [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
+ [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
+ [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
+ [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
+ [0x077E]=true,[0x077F]=true,
+ [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true,
+ [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true,
+ [0x08A7]=true,[0x08A3]=true,
+ [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
+ [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
+ [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
+ [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
+ [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true,
+ [0x074E]=true,[0x074F]=true,
+ [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true,
+ [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true,
+ [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true,
+ [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true,
+ [0x0853]=true,
+ [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true,
+ [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true,
+ [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true,
+ [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true,
+ [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true,
+ [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true,
+ [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
+ [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
+ [0x07E6]=true,
+ [tatweel]=true,[zwj]=true,
+ [0x08A1]=true,[0x08AF]=true,[0x08B0]=true,
+}
+local arab_warned={}
+local function warning(current,what)
+ local char=getchar(current)
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char]=true
+ end
+end
+local function finish(first,last)
+ if last then
+ if first==last then
+ local fc=getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ else
+ local lc=getchar(last)
+ if medial[lc] or final[lc] then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ end
+ first,last=nil,nil
+ elseif first then
+ local fc=getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first=nil
+ end
+ return first,last
+end
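+-- methods.arab implements the joining analysis for Arabic and related scripts
+-- (also used for syrc, mand and nko): characters are classified via the
+-- isolated/final/medial tables above and tagged with isol/init/medi/fina/mark
+-- states; finish closes off a pending run at a non-joining character.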
+function methods.arab(head,font,attr)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local marks=tfmdata.resources.marks
+ local first,last,current,done=nil,nil,head,false
+ current=tonut(current)
+ while current do
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then
+ done=true
+ local char=getchar(current)
+ if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ setprop(current,a_state,s_mark)
+ elseif isolated[char] then
+ first,last=finish(first,last)
+ setprop(current,a_state,s_isol)
+ first,last=nil,nil
+ elseif not first then
+ if medial[char] then
+ setprop(current,a_state,s_init)
+ first,last=first or current,current
+ elseif final[char] then
+ setprop(current,a_state,s_isol)
+ first,last=nil,nil
+ else
+ first,last=finish(first,last)
+ end
+ elseif medial[char] then
+ first,last=first or current,current
+ setprop(current,a_state,s_medi)
+ elseif final[char] then
+ if getprop(last,a_state)~=s_init then
+ setprop(last,a_state,s_medi)
+ end
+ setprop(current,a_state,s_fina)
+ first,last=nil,nil
+ elseif char>=0x0600 and char<=0x06FF then
+ setprop(current,a_state,s_rest)
+ first,last=finish(first,last)
+ else
+ first,last=finish(first,last)
+ end
+ else
+ if first or last then
+ first,last=finish(first,last)
+ end
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=getnext(current)
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head,done
+end
+methods.syrc=methods.arab
+methods.mand=methods.arab
+methods.nko=methods.arab
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks=v
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otn']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local concat,insert,remove=table.concat,table.insert,table.remove
+local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local random=math.random
+local formatters=string.formatters
+local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
+local registertracker=trackers.register
+local fonts=fonts
+local otf=fonts.handlers.otf
+local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end)
+local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end)
+local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end)
+local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end)
+local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end)
+local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end)
+local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end)
+local trace_details=false registertracker("otf.details",function(v) trace_details=v end)
+local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end)
+local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end)
+local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end)
+local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end)
+local report_direct=logs.reporter("fonts","otf direct")
+local report_subchain=logs.reporter("fonts","otf subchain")
+local report_chain=logs.reporter("fonts","otf chain")
+local report_process=logs.reporter("fonts","otf process")
+local report_prepare=logs.reporter("fonts","otf prepare")
+local report_warning=logs.reporter("fonts","otf warning")
+local report_run=logs.reporter("fonts","otf run")
+registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+local nuts=nodes.nuts
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local setfield=nuts.setfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getattr=nuts.getattr
+local setattr=nuts.setattr
+local getprop=nuts.getprop
+local setprop=nuts.setprop
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local insert_node_before=nuts.insert_before
+local insert_node_after=nuts.insert_after
+local delete_node=nuts.delete
+local remove_node=nuts.remove
+local copy_node=nuts.copy
+local copy_node_list=nuts.copy_list
+local find_node_tail=nuts.tail
+local flush_node_list=nuts.flush_list
+local free_node=nuts.free
+local end_of_math=nuts.end_of_math
+local traverse_nodes=nuts.traverse
+local traverse_id=nuts.traverse_id
+local setmetatableindex=table.setmetatableindex
+local zwnj=0x200C
+local zwj=0x200D
+local wildcard="*"
+local default="dflt"
+local nodecodes=nodes.nodecodes
+local whatcodes=nodes.whatcodes
+local glyphcodes=nodes.glyphcodes
+local disccodes=nodes.disccodes
+local glyph_code=nodecodes.glyph
+local glue_code=nodecodes.glue
+local disc_code=nodecodes.disc
+local whatsit_code=nodecodes.whatsit
+local math_code=nodecodes.math
+local dir_code=whatcodes.dir
+local localpar_code=whatcodes.localpar
+local discretionary_code=disccodes.discretionary
+local regular_code=disccodes.regular
+local automatic_code=disccodes.automatic
+local ligature_code=glyphcodes.ligature
+local privateattribute=attributes.private
+local a_state=privateattribute('state')
+local a_cursbase=privateattribute('cursbase')
+local injections=nodes.injections
+local setmark=injections.setmark
+local setcursive=injections.setcursive
+local setkern=injections.setkern
+local setpair=injections.setpair
+local resetinjection=injections.reset
+local copyinjection=injections.copy
+local setligaindex=injections.setligaindex
+local getligaindex=injections.getligaindex
+local cursonce=true
+local fonthashes=fonts.hashes
+local fontdata=fonthashes.identifiers
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local onetimemessage=fonts.loggers.onetimemessage or function() end
+otf.defaultnodealternate="none"
+local tfmdata=false
+local characters=false
+local descriptions=false
+local resources=false
+local marks=false
+local currentfont=false
+local lookuptable=false
+local anchorlookups=false
+local lookuptypes=false
+local lookuptags=false
+local handlers={}
+local rlmode=0
+local featurevalue=false
+local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+local function logwarning(...)
+ report_direct(...)
+end
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(n)
+ if type(n)=="number" then
+ local description=descriptions[n]
+ local name=description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+local function cref(kind,chainname,chainlookupname,lookupname,index)
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
+end
+local function copy_glyph(g)
+ local components=getfield(g,"components")
+ if components then
+ setfield(g,"components",nil)
+ local n=copy_node(g)
+ copyinjection(n,g)
+ setfield(g,"components",components)
+ return n
+ else
+ local n=copy_node(g)
+ copyinjection(n,g)
+ return n
+ end
+end
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start==stop and getchar(start)==char then
+ return head,start
+ else
+ local prev=getprev(start)
+ local next=getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base=copy_glyph(start)
+ if head==start then
+ head=base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ return head,base
+ end
+end
+local function getcomponentindex(start)
+ if getid(start)~=glyph_code then
+ return 0
+ elseif getsubtype(start)==ligature_code then
+ local i=0
+ local components=getfield(start,"components")
+ while components do
+ i=i+getcomponentindex(components)
+ components=getnext(components)
+ end
+ return i
+ elseif not marks[getchar(start)] then
+ return 1
+ else
+ return 0
+ end
+end
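+-- toligature replaces the glyph run from start to stop by a single ligature
+-- glyph; the original glyphs become its components and marks that are kept get
+-- a ligature index so that mark-to-ligature anchoring still works.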
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
+ if start==stop and getchar(start)==char then
+ resetinjection(start)
+ setfield(start,"char",char)
+ return head,start
+ end
+ local prev=getprev(start)
+ local next=getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base=copy_glyph(start)
+ if start==head then
+ head=base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ if not discfound then
+ local deletemarks=markflag~="mark"
+ local components=start
+ local baseindex=0
+ local componentindex=0
+ local head=base
+ local current=base
+ while start do
+ local char=getchar(start)
+ if not marks[char] then
+ baseindex=baseindex+componentindex
+ componentindex=getcomponentindex(start)
+ elseif not deletemarks then
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ head,current=insert_node_after(head,current,copy_node(start))
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start=getnext(start)
+ end
+ local start=getnext(current)
+ while start and getid(start)==glyph_code do
+ local char=getchar(start)
+ if marks[char] then
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ else
+ break
+ end
+ start=getnext(start)
+ end
+ end
+ return head,base
+end
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head,start,true
+end
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n=#alternatives
+ if value=="random" then
+ local r=random(1,n)
+ return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value=="first" then
+ return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value=="last" then
+ return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value=tonumber(value)
+ if type(value)~="number" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+    return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+   elseif defaultalt=="last" then
+    return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value==0 then
+ return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value<1 then
+ return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples=#multiple
+ if nofmultiples>0 then
+ resetinjection(start)
+ setfield(start,"char",multiple[1])
+ if nofmultiples>1 then
+ local sn=getnext(start)
+ for k=2,nofmultiples do
+ local n=copy_node(start)
+ resetinjection(n)
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
+ if sn then
+ setfield(sn,"prev",n)
+ end
+ setfield(start,"next",n)
+ start=n
+ end
+ end
+ return head,start,true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(getchar(start)))
+ end
+ return head,start,false
+ end
+end
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
+ end
+ end
+ return head,start,true
+end
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
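+-- gsub_ligature scans forward for the longest match in the ligature tree,
+-- optionally skipping marks, and then builds the ligature via toligature (or
+-- markstoligature when the start glyph itself is a mark).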
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s,stop,discfound=getnext(start),nil,false
+ local startchar=getchar(start)
+ if marks[startchar] then
+ while s do
+ local id=getid(s)
+ if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then
+ local lg=ligature[getchar(s)]
+ if lg then
+ stop=s
+ ligature=lg
+ s=getnext(s)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=getchar(stop)
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head,start,true
+ else
+ end
+ end
+ else
+ local skipmark=sequence.flags[1]
+ while s do
+ local id=getid(s)
+ if id==glyph_code and getsubtype(s)<256 then
+ if getfont(s)==currentfont then
+ local char=getchar(s)
+ if skipmark and marks[char] then
+ s=getnext(s)
+ else
+ local lg=ligature[char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id==disc_code then
+ discfound=true
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ local lig=ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar=getchar(stop)
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ else
+ resetinjection(start)
+ setfield(start,"char",lig)
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ end
+ return head,start,true
+ else
+ end
+ end
+ return head,start,false
+end
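+-- the three mark handlers (mark2base, mark2ligature, mark2mark) look backwards
+-- for a suitable base glyph, match the anchor classes of the current lookup and
+-- delegate the actual offset bookkeeping to injections.setmark.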
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=getligaindex(start)
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local base=getprev(start)
+ local slc=getligaindex(start)
+ if slc then
+ while base do
+ local blc=getligaindex(base)
+ if blc and blc~=slc then
+ base=getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
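+-- gpos_cursive skips intermediate marks, looks up the entry anchor of the next
+-- glyph and connects it with the exit anchor of the current one via
+-- injections.setcursive.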
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local done=false
+ local startchar=getchar(start)
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
+ if marks[nextchar] then
+ nxt=getnext(nxt)
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head,start,false
+ end
+end
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar=getchar(start)
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head,start,false
+end
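+-- gpos_pair handles pair kerning and positioning: a table valued kern record
+-- carries two value records and goes through setpair, a plain number is a
+-- simple kern between the two glyphs and goes through setkern.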
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ local snext=getnext(start)
+ if not snext then
+ return head,start,false
+ else
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ local lookuptype=lookuptypes[lookupname]
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=getnext(snext)
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+end
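+-- chainprocs and chainmores hold the substitution and positioning handlers that
+-- are called from contextual chain lookups; most chainmores entries simply
+-- reuse the corresponding chainprocs handler.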
+local chainmores={}
+local chainprocs={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+local logwarning=report_subchain
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+local logwarning=report_chain
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char=getchar(start)
+ local replacement=replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head,start,true
+ else
+ return head,start,false
+ end
+end
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local current=start
+ local subtables=currentlookup.subtables
+ if #subtables>1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if getid(current)==glyph_code then
+ local currentchar=getchar(current)
+ local lookupname=subtables[1]
+ local replacement=lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement=replacement[currentchar]
+ if not replacement or replacement=="" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ resetinjection(current)
+ setfield(current,"char",replacement)
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=getnext(current)
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_single=chainprocs.gsub_single
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local replacements=lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements=replacements[startchar]
+  if not replacements or replacements=="" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_multiple=chainprocs.gsub_multiple
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current=start
+ local subtables=currentlookup.subtables
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+  if getid(current)==glyph_code then
+   local currentchar=getchar(current)
+   local lookupname=subtables[1]
+   local alternatives=lookuphash[lookupname]
+   if not alternatives then
+    if trace_bugs then
+     logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+    end
+   else
+    alternatives=alternatives[currentchar]
+    if alternatives then
+     local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives)
+     if choice then
+      if trace_alternatives then
+       logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),choice,gref(choice),comment)
+      end
+      resetinjection(start)
+      setfield(start,"char",choice)
+     else
+      if trace_alternatives then
+       logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(currentchar),comment)
+      end
+     end
+    elseif trace_bugs then
+     logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
+    end
+   end
+   return head,start,true
+  elseif current==stop then
+   break
+  else
+   current=getnext(current)
+  end
+ end
+ return head,start,false
+end
+chainmores.gsub_alternate=chainprocs.gsub_alternate
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local ligatures=lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures=ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s=getnext(start)
+ local discfound=false
+ local last=stop
+ local nofreplacements=0
+ local skipmark=currentlookup.flags[1]
+ while s do
+ local id=getid(s)
+ if id==disc_code then
+ s=getnext(s)
+ discfound=true
+ else
+ local schar=getchar(s)
+ if skipmark and marks[schar] then
+ s=getnext(s)
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures,last,nofreplacements=lg,s,nofreplacements+1
+ if s==stop then
+ break
+ else
+ s=getnext(s)
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2=ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
+ if start==stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
+ end
+ end
+ head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head,start,true,nofreplacements
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
+ end
+ end
+ end
+ end
+ return head,start,false,0
+end
+chainmores.gsub_ligature=chainprocs.gsub_ligature
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=getligaindex(start)
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ local slc=getligaindex(start)
+ if slc then
+ while base do
+ local blc=getligaindex(base)
+ if blc and blc~=slc then
+ base=getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+       logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local exitanchors=lookuphash[lookupname]
+ if exitanchors then
+ exitanchors=exitanchors[startchar]
+ end
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
+ if marks[nextchar] then
+ nxt=getnext(nxt)
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head,start,false
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head,start,false
+end
+chainmores.gpos_single=chainprocs.gpos_single
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext=getnext(start)
+ if snext then
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local lookuptype=lookuptypes[lookupname]
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=getnext(snext)
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a,b=krn[2],krn[6]
+ if a and a~=0 then
+ local k=setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ end
+ if b and b~=0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+ end
+ end
+ return head,start,false
+end
+chainmores.gpos_pair=chainprocs.gpos_pair
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+local quit_on_no_replacement=true
+directives.register("otf.chain.quitonnoreplacement",function(value)
+ quit_on_no_replacement=value
+end)
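+-- normal_handle_contextchain matches every context rule of a chained lookup
+-- against the node list: the "current" part first, then the "before" and
+-- "after" parts, skipping marks, ligatures and bases according to the lookup
+-- flags; on a match it dispatches the chained sublookups via chainprocs (one
+-- sublookup) or chainmores (several sublookups per rule).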
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ local flags=sequence.flags
+ local done=false
+ local skipmark=flags[1]
+ local skipligature=flags[2]
+ local skipbase=flags[3]
+ local someskip=skipmark or skipligature or skipbase
+ local markclass=sequence.markclass
+ local skipped=false
+ for k=1,#contexts do
+ local match=true
+ local current=start
+ local last=start
+ local ck=contexts[k]
+ local seq=ck[3]
+ local s=#seq
+ if s==1 then
+ match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ else
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
+ else
+ local n=f+1
+ last=getnext(last)
+ while n<=l do
+ if last then
+ local id=getid(last)
+ if id==glyph_code then
+ if getfont(last)==currentfont and getsubtype(last)<256 then
+ local char=getchar(last)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=getnext(last)
+ elseif seq[n][char] then
+ if n<l then
+ last=getnext(last)
+ end
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ last=getnext(last)
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=getprev(start)
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=getid(prev)
+ if id==glyph_code then
+ if getfont(prev)==currentfont and getsubtype(prev)<256 then
+ local char=getchar(prev)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=getprev(prev)
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ end
+ else
+ match=false
+ end
+ end
+ if match and s>l then
+ local current=last and getnext(last)
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=getid(current)
+ if id==glyph_code then
+ if getfont(current)==currentfont and getsubtype(current)<256 then
+ local char=getchar(current)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=getnext(current)
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ end
+ else
+ match=false
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=getchar(start)
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done=true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i=1
+ while true do
+ if skipped then
+ while true do
+ local char=getchar(start)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ start=getnext(start)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname=chainlookups[i]
+ local chainlookup=lookuptable[chainlookupname]
+ if not chainlookup then
+ i=i+1
+ else
+ local cp=chainmores[chainlookup.type]
+ if not cp then
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i=i+1
+ else
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
+ end
+ end
+ if i>nofchainlookups then
+ break
+ elseif start then
+ start=getnext(start)
+ else
+ end
+ end
+ end
+ else
+ local replacements=ck[7]
+ if replacements then
+ head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ else
+ done=quit_on_no_replacement
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head,start,done
+end
+local verbose_handle_contextchain=function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+otf.chainhandlers={
+ normal=normal_handle_contextchain,
+ verbose=verbose_handle_contextchain,
+}
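+-- otf.setcontextchain installs a contextchain handler ("normal" by default,
+-- or a previously registered verbose variant) and aliases it for all gsub/gpos
+-- context lookup types.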
+function otf.setcontextchain(method)
+ if not method or method=="normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain=normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler=otf.chainhandlers[method]
+ handlers.contextchain=function(...)
+ return handler(currentfont,...)
+ end
+ end
+ handlers.gsub_context=handlers.contextchain
+ handlers.gsub_contextchain=handlers.contextchain
+ handlers.gsub_reversecontextchain=handlers.contextchain
+ handlers.gpos_contextchain=handlers.contextchain
+ handlers.gpos_context=handlers.contextchain
+end
+otf.setcontextchain()
+local missing={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+local logwarning=report_process
+local function report_missing_cache(typ,lookup)
+ local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
+ local t=f[typ] if not t then t={} f[typ]=t end
+ if not t[lookup] then
+ t[lookup]=true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+local resolved={}
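+-- lookuphashes lazily fetches the prepared lookuphash of a font id and caches
+-- false for fonts without lookups, so featuresprocessor can bail out early.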
+local lookuphashes={}
+setmetatableindex(lookuphashes,function(t,font)
+ local lookuphash=fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash=false
+ end
+ t[font]=lookuphash
+ return lookuphash
+end)
+local autofeatures=fonts.analyzers.features
+local function initialize(sequence,script,language,enabled)
+ local features=sequence.features
+ if features then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local kind=order[i]
+ local valid=enabled[kind]
+ if valid then
+ local scripts=features[kind]
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
+ end
+ end
+ else
+ end
+ end
+ return false
+end
+function otf.dataset(tfmdata,font)
+ local shared=tfmdata.shared
+ local properties=tfmdata.properties
+ local language=properties.language or "dflt"
+ local script=properties.script or "dflt"
+ local enabled=shared.features
+ local res=resolved[font]
+ if not res then
+ res={}
+ resolved[font]=res
+ end
+ local rs=res[script]
+ if not rs then
+ rs={}
+ res[script]=rs
+ end
+ local rl=rs[language]
+ if not rl then
+ rl={
+ }
+ rs[language]=rl
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+ end
+ end
+ return rl
+end
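+-- featuresprocessor is the node-mode processor: for every active dataset it
+-- walks the node list, applies the lookup handlers to glyphs of the current
+-- font, descends into discretionaries, and tracks direction changes through
+-- dir/localpar whatsits (rlmode/rlparmode).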
+local function featuresprocessor(head,font,attr)
+ local lookuphash=lookuphashes[font]
+ if not lookuphash then
+ return head,false
+ end
+ head=tonut(head)
+ if trace_steps then
+ checkstep(head)
+ end
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ resources=tfmdata.resources
+ marks=resources.marks
+ anchorlookups=resources.lookup_to_anchor
+ lookuptable=resources.lookups
+ lookuptypes=resources.lookuptypes
+ lookuptags=resources.lookuptags
+ currentfont=font
+ rlmode=0
+ local sequences=resources.sequences
+ local done=false
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local dirstack={}
+ for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local sequence=dataset[5]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=getid(start)
+ if id==glyph_code then
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=a==attr
+ else
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[getchar(start)]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=getprev(start) end
+ else
+ start=getprev(start)
+ end
+ else
+ start=getprev(start)
+ end
+ else
+ start=getprev(start)
+ end
+ end
+ else
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[getchar(start)]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ end
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ start=getnext(start)
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=getprev(disc)
+ local next=getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ local a=getattr(prev,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
+ else
+ a=not attribute or getprop(prev,a_state)==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[getchar(prev)]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ success=true
+ end
+ end
+ end
+ setfield(prev,"next",disc)
+ end
+ return next
+ end
+ while start do
+ local id=getid(start)
+ if id==glyph_code then
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[getchar(start)]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
+ end
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ start=getnext(start)
+ end
+ elseif id==disc_code then
+ if getsubtype(start)==discretionary_code then
+ local pre=getfield(start,"pre")
+ if pre then
+ local new=subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post=getfield(start,"post")
+ if post then
+ local new=subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace=getfield(start,"replace")
+ if replace then
+ local new=subrun(replace)
+ if new then setfield(start,"replace",new) end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
+ end
+ start=getnext(start)
+ elseif id==whatsit_code then
+ local subtype=getsubtype(start)
+ if subtype==dir_code then
+ local dir=getfield(start,"dir")
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=getfield(start,"dir")
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=getnext(start)
+ elseif id==math_code then
+ start=getnext(end_of_math(start))
+ else
+ start=getnext(start)
+ end
+ end
+ end
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[getchar(start)]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ elseif not start then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ start=getnext(start)
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=getprev(disc)
+ local next=getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ local a=getattr(prev,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
+ else
+ a=not attribute or getprop(prev,a_state)==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[getchar(prev)]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ setfield(prev,"next",disc)
+ end
+ return next
+ end
+ while start do
+ local id=getid(start)
+ if id==glyph_code then
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[getchar(start)]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ elseif not start then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ start=getnext(start)
+ end
+ elseif id==disc_code then
+ if getsubtype(start)==discretionary_code then
+ local pre=getfield(start,"pre")
+ if pre then
+ local new=subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post=getfield(start,"post")
+ if post then
+ local new=subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace=getfield(start,"replace")
+ if replace then
+ local new=subrun(replace)
+ if new then setfield(start,"replace",new) end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
+ end
+ start=getnext(start)
+ elseif id==whatsit_code then
+ local subtype=getsubtype(start)
+ if subtype==dir_code then
+ local dir=getfield(start,"dir")
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=getfield(start,"dir")
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=getnext(start)
+ elseif id==math_code then
+ start=getnext(end_of_math(start))
+ else
+ start=getnext(start)
+ end
+ end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
+ end
+ head=tonode(head)
+ return head,done
+end
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if target then
+ target[unicode]=lookupdata
+ else
+ lookuphash[lookupname]={ [unicode]=lookupdata }
+ end
+end
+local action={
+ substitution=generic,
+ multiple=generic,
+ alternate=generic,
+ position=generic,
+ ligature=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ for i=1,#lookupdata do
+ local li=lookupdata[i]
+ local tu=target[li]
+ if not tu then
+ tu={}
+ target[li]=tu
+ end
+ target=tu
+ end
+ target.ligature=unicode
+ end,
+ pair=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ local others=target[unicode]
+ local paired=lookupdata[1]
+ if others then
+ others[paired]=lookupdata
+ else
+ others={ [paired]=lookupdata }
+ target[unicode]=others
+ end
+ end,
+}
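+-- prepare_lookups flattens per-glyph description data (slookups, mlookups,
+-- kerns and mark/cexit anchors) into lookuphash tables indexed by lookup name
+-- and unicode. The ligature action above builds a small trie keyed by the
+-- component glyphs; e.g. an f+f+i ligature ends up roughly (sketch, component
+-- split depends on the font) as lookuphash[name][f][f][i]={ ligature=<ffi> }.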
+local function prepare_lookups(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local anchor_to_lookup=resources.anchor_to_lookup
+ local lookup_to_anchor=resources.lookup_to_anchor
+ local lookuptypes=resources.lookuptypes
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ if description then
+ local lookups=description.slookups
+ if lookups then
+ for lookupname,lookupdata in next,lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for lookupname,lookuplist in next,lookups do
+ local lookuptype=lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata=lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+ local list=description.kerns
+ if list then
+ for lookup,krn in next,list do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=krn
+ else
+ lookuphash[lookup]={ [unicode]=krn }
+ end
+ end
+ end
+ local list=description.anchors
+ if list then
+ for typ,anchors in next,list do
+ if typ=="mark" or typ=="cexit" then
+ for name,anchor in next,anchors do
+ local lookups=anchor_to_lookup[name]
+ if lookups then
+ for lookup,_ in next,lookups do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=anchors
+ else
+ lookuphash[lookup]={ [unicode]=anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+local function split(replacement,original)
+ local result={}
+ for i=1,#replacement do
+ result[original[i]]=replacement[i]
+ end
+ return result
+end
+local valid={
+ coverage={ chainsub=true,chainpos=true,contextsub=true },
+ reversecoverage={ reversesub=true },
+ glyphs={ chainsub=true,chainpos=true },
+}
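+-- prepare_contextchains turns each contextual rule into one flat sequence of
+-- coverage sets plus the start/stop indices of its "current" part, stores
+-- { rule,lookuptype,sequence,start,stop,lookups,replacements } per rule, and
+-- registers the rule list under every glyph that can start the current part;
+-- unsupported rule formats are reported and skipped.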
+local function prepare_contextchains(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
+ local lookups=rawdata.lookups
+ if lookups then
+ for lookupname,lookupdata in next,rawdata.lookups do
+ local lookuptype=lookupdata.type
+ if lookuptype then
+ local rules=lookupdata.rules
+ if rules then
+ local format=lookupdata.format
+ local validformat=valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
+ else
+ local contexts=lookuphash[lookupname]
+ if not contexts then
+ contexts={}
+ lookuphash[lookupname]=contexts
+ end
+ local t,nt={},0
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ if sequence[1] then
+ nt=nt+1
+ t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
+ for unic,_ in next,sequence[start] do
+ local cu=contexts[unic]
+ if not cu then
+ contexts[unic]=t
+ end
+ end
+ end
+ end
+ end
+ else
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
+ end
+ end
+ end
+end
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local rawdata=tfmdata.shared.rawdata
+ local properties=rawdata.properties
+ if not properties.initialized then
+ local starttime=trace_preparing and os.clock()
+ local resources=rawdata.resources
+ resources.lookuphash=resources.lookuphash or {}
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized=true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
+}
+otf.handlers=handlers
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otp']={
+ version=1.001,
+ comment="companion to font-otf.lua (packing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type=next,type
+local sort,concat=table.sort,table.concat
+local sortedhash=table.sortedhash
+local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end)
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+fonts=fonts or {}
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local otf=handlers.otf or {}
+handlers.otf=otf
+local enhancers=otf.enhancers or {}
+otf.enhancers=enhancers
+local glists=otf.glists or { "gsub","gpos" }
+otf.glists=glists
+local criterium=1
+local threshold=0
+local function tabstr_normal(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if type(v)=="table" then
+ s[n]=k..">"..tabstr_normal(v)
+ elseif v==true then
+ s[n]=k.."+"
+ elseif v then
+ s[n]=k.."="..v
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_flat(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ s[n]=k.."="..v
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_mixed(t)
+ local s={}
+ local n=#t
+ if n==0 then
+ return ""
+ elseif n==1 then
+ local k=t[1]
+ if k==true then
+ return "++"
+ elseif k==false then
+ return "--"
+ else
+ return tostring(k)
+ end
+ else
+ for i=1,n do
+ local k=t[i]
+ if k==true then
+ s[i]="++"
+ elseif k==false then
+ s[i]="--"
+ else
+ s[i]=k
+ end
+ end
+ return concat(s,",")
+ end
+end
+local function tabstr_boolean(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if v then
+ s[n]=k.."+"
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
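+-- packdata shares repeated subtables in a cached font: each table is reduced
+-- to a string key by the tabstr_* helpers above, identical tables are counted,
+-- and those used more than once (criterium) are replaced by an index into
+-- data.tables in a second pass; unpackdata below restores the shared tables
+-- when the cache is loaded.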
+local function packdata(data)
+ if data then
+ local h,t,c={},{},{}
+ local hh,tt,cc={},{},{}
+ local nt,ntt=0,0
+ local function pack_normal(v)
+ local tag=tabstr_normal(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag=tabstr_flat(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag=tabstr_boolean(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag=concat(v," ")
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag=tabstr_mixed(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_final(v)
+ if c[v]<=criterium then
+ return t[v]
+ else
+ local hv=hh[v]
+ if hv then
+ return hv
+ else
+ ntt=ntt+1
+ tt[ntt]=t[v]
+ hh[v]=ntt
+ cc[ntt]=c[v]
+ return ntt
+ end
+ end
+ end
+ local function success(stage,pass)
+ if nt==0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt>=threshold then
+ local one,two,rest=0,0,0
+ if pass==1 then
+ for k,v in next,c do
+ if v==1 then
+ one=one+1
+ elseif v==2 then
+ two=two+1
+ else
+ rest=rest+1
+ end
+ end
+ else
+ for k,v in next,cc do
+ if v>20 then
+ rest=rest+1
+ elseif v>10 then
+ two=two+1
+ else
+ one=one+1
+ end
+ end
+ data.tables=tt
+ end
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium)
+ end
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold)
+ end
+ return false
+ end
+ end
+ local function packers(pass)
+ if pass==1 then
+ return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed
+ else
+ return pack_final,pack_final,pack_final,pack_final,pack_final
+ end
+ end
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local boundingbox=description.boundingbox
+ if boundingbox then
+ description.boundingbox=pack_indexed(boundingbox)
+ end
+ local slookups=description.slookups
+ if slookups then
+ for tag,slookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local t=slookup[2] if t then slookup[2]=pack_indexed(t) end
+ local t=slookup[3] if t then slookup[3]=pack_indexed(t) end
+ elseif what~="substitution" then
+ slookups[tag]=pack_indexed(slookup)
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#mlookup do
+ local lookup=mlookup[i]
+ local t=lookup[2] if t then lookup[2]=pack_indexed(t) end
+ local t=lookup[3] if t then lookup[3]=pack_indexed(t) end
+ end
+ elseif what~="substitution" then
+ for i=1,#mlookup do
+ mlookup[i]=pack_indexed(mlookup[i])
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_flat(kern)
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_normal(kern)
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ for what,anchor in next,anchors do
+ if what=="baselig" then
+ for _,a in next,anchor do
+ for k=1,#a do
+ a[k]=pack_indexed(a[k])
+ end
+ end
+ else
+ for k,v in next,anchor do
+ anchor[k]=pack_indexed(v)
+ end
+ end
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ for i=1,#altuni do
+ altuni[i]=pack_flat(altuni[i])
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.replacements if r then rule.replacements=pack_flat (r) end
+ local r=rule.lookups if r then rule.lookups=pack_indexed(r) end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ anchor_to_lookup[anchor]=pack_normal(lookup)
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ lookup_to_anchor[lookup]=pack_normal(anchor)
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ local flags=sequence.flags
+ if flags then
+ sequence.flags=pack_normal(flags)
+ end
+ local subtables=sequence.subtables
+ if subtables then
+ sequence.subtables=pack_normal(subtables)
+ end
+ local features=sequence.features
+ if features then
+ for script,feature in next,features do
+ features[script]=pack_normal(feature)
+ end
+ end
+ local order=sequence.order
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ local markclass=sequence.markclass
+ if markclass then
+ sequence.markclass=pack_boolean(markclass)
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for name,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ lookup.flags=pack_normal(flags)
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ lookup.subtables=pack_normal(subtables)
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local list=features[what]
+ if list then
+ for feature,spec in next,list do
+ list[feature]=pack_normal(spec)
+ end
+ end
+ end
+ end
+ if not success(1,pass) then
+ return
+ end
+ end
+ if nt>0 then
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local kerns=description.kerns
+ if kerns then
+ description.kerns=pack_normal(kerns)
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ math.kerns=pack_normal(kerns)
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ description.anchors=pack_normal(anchors)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ mlookups[tag]=pack_normal(mlookup)
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ description.altuni=pack_normal(altuni)
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then rule.before=pack_normal(r) end
+ local r=rule.after if r then rule.after=pack_normal(r) end
+ local r=rule.current if r then rule.current=pack_normal(r) end
+ end
+ end
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ sequence.features=pack_normal(sequence.features)
+ end
+ end
+ if not success(2,pass) then
+ end
+ end
+ for pass=1,2 do
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local slookups=description.slookups
+ if slookups then
+ description.slookups=pack_normal(slookups)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ description.mlookups=pack_normal(mlookups)
+ end
+ end
+ end
+ end
+ end
+end
+local unpacked_mt={
+ __index=function(t,k)
+ t[k]=false
+ return k
+ end
+}
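+-- unpackdata walks the same fields as packdata and replaces every stored index
+-- by the shared table from data.tables; the unpacked_mt metatable marks shared
+-- tables that were already expanded so they are only traversed once.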
+local function unpackdata(data)
+ if data then
+ local tables=data.tables
+ if tables then
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ local unpacked={}
+ setmetatable(unpacked,unpacked_mt)
+ for unicode,description in next,data.descriptions do
+ local tv=tables[description.boundingbox]
+ if tv then
+ description.boundingbox=tv
+ end
+ local slookups=description.slookups
+ if slookups then
+ local tv=tables[slookups]
+ if tv then
+ description.slookups=tv
+ slookups=unpacked[tv]
+ end
+ if slookups then
+ for tag,lookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ elseif what~="substitution" then
+ local tv=tables[lookup]
+ if tv then
+ slookups[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ local tv=tables[mlookups]
+ if tv then
+ description.mlookups=tv
+ mlookups=unpacked[tv]
+ end
+ if mlookups then
+ for tag,list in next,mlookups do
+ local tv=tables[list]
+ if tv then
+ mlookups[tag]=tv
+ list=unpacked[tv]
+ end
+ if list then
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#list do
+ local lookup=list[i]
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ end
+ elseif what~="substitution" then
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ description.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ math.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ local ta=tables[anchors]
+ if ta then
+ description.anchors=ta
+ anchors=unpacked[ta]
+ end
+ if anchors then
+ for tag,anchor in next,anchors do
+ if tag=="baselig" then
+ for _,list in next,anchor do
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ else
+ for a,data in next,anchor do
+ local tv=tables[data]
+ if tv then
+ anchor[a]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ local altuni=tables[altuni]
+ if altuni then
+ description.altuni=altuni
+ for i=1,#altuni do
+ local tv=tables[altuni[i]]
+ if tv then
+ altuni[i]=tv
+ end
+ end
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local before=rule.before
+ if before then
+ local tv=tables[before]
+ if tv then
+ rule.before=tv
+ before=unpacked[tv]
+ end
+ if before then
+ for i=1,#before do
+ local tv=tables[before[i]]
+ if tv then
+ before[i]=tv
+ end
+ end
+ end
+ end
+ local after=rule.after
+ if after then
+ local tv=tables[after]
+ if tv then
+ rule.after=tv
+ after=unpacked[tv]
+ end
+ if after then
+ for i=1,#after do
+ local tv=tables[after[i]]
+ if tv then
+ after[i]=tv
+ end
+ end
+ end
+ end
+ local current=rule.current
+ if current then
+ local tv=tables[current]
+ if tv then
+ rule.current=tv
+ current=unpacked[tv]
+ end
+ if current then
+ for i=1,#current do
+ local tv=tables[current[i]]
+ if tv then
+ current[i]=tv
+ end
+ end
+ end
+ end
+ local replacements=rule.replacements
+ if replacements then
+ local tv=tables[replacements]
+ if tv then
+ rule.replacements=tv
+ end
+ end
+ local lookups=rule.lookups
+ if lookups then
+ local tv=tables[lookups]
+ if tv then
+ rule.lookups=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ local tv=tables[lookup]
+ if tv then
+ anchor_to_lookup[anchor]=tv
+ end
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ local tv=tables[anchor]
+ if tv then
+ lookup_to_anchor[lookup]=tv
+ end
+ end
+ end
+ local ls=resources.sequences
+ if ls then
+ for _,feature in next,ls do
+ local flags=feature.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ feature.flags=tv
+ end
+ end
+ local subtables=feature.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ feature.subtables=tv
+ end
+ end
+ local features=feature.features
+ if features then
+ local tv=tables[features]
+ if tv then
+ feature.features=tv
+ features=unpacked[tv]
+ end
+ if features then
+ for script,data in next,features do
+ local tv=tables[data]
+ if tv then
+ features[script]=tv
+ end
+ end
+ end
+ end
+ local order=feature.order
+ if order then
+ local tv=tables[order]
+ if tv then
+ feature.order=tv
+ end
+ end
+ local markclass=feature.markclass
+ if markclass then
+ local tv=tables[markclass]
+ if tv then
+ feature.markclass=tv
+ end
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ lookup.flags=tv
+ end
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ lookup.subtables=tv
+ end
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local feature=features[what]
+ if feature then
+ for tag,spec in next,feature do
+ local tv=tables[spec]
+ if tv then
+ feature[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ data.tables=nil
+ end
+ end
+end
+if otf.enhancers.register then
+ otf.enhancers.register("pack",packdata)
+ otf.enhancers.register("unpack",unpackdata)
+end
+otf.enhancers.unpack=unpackdata
+otf.enhancers.pack=packdata
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-lua']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.formats.lua="lua"
+function fonts.readers.lua(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
+end
+
+end -- closure
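-- Editor's sketch (not part of the patch): fonts.readers.lua above resolves
-- "<name>.lua", loads it, and expects the chunk to return a constructor that
-- takes the specification and hands back a tfm-like table. A hypothetical
-- "demo-font.lua" honouring that contract could contain just:
return function(specification)
 return {
  name=specification.name,
  parameters={ designsize=specification.size or 655360 },
  characters={}, -- glyph metrics would go here
  properties={},
 }
end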
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-def']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
+local tostring,next=tostring,next
+local lpegmatch=lpeg.match
+local suffixonly,removesuffix=file.suffix,file.removesuffix
+local allocate=utilities.storage.allocate
+local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end)
+local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
+trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading")
+trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*")
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local fontdata=fonts.hashes.identifiers
+local readers=fonts.readers
+local definers=fonts.definers
+local specifiers=fonts.specifiers
+local constructors=fonts.constructors
+local fontgoodies=fonts.goodies
+readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' }
+local variants=allocate()
+specifiers.variants=variants
+definers.methods=definers.methods or {}
+local internalized=allocate()
+local lastdefined=nil
+local loadedfonts=constructors.loadedfonts
+local designsizes=constructors.designsizes
+local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+local splitter,splitspecifiers=nil,""
+local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc
+local left=P("(")
+local right=P(")")
+local colon=P(":")
+local space=P(" ")
+definers.defaultlookup="file"
+local prefixpattern=P(false)
+local function addspecifier(symbol)
+ splitspecifiers=splitspecifiers..symbol
+ local method=S(splitspecifiers)
+ local lookup=C(prefixpattern)*colon
+ local sub=left*C(P(1-left-right-method)^1)*right
+ local specification=C(method)*C(P(1)^1)
+ local name=C((1-sub-specification)^1)
+ splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc("")))
+end
+local function addlookup(str,default)
+ prefixpattern=prefixpattern+P(str)
+end
+definers.addlookup=addlookup
+addlookup("file")
+addlookup("name")
+addlookup("spec")
+local function getspecification(str)
+ return lpegmatch(splitter,str or "")
+end
+definers.getspecification=getspecification
+function definers.registersplit(symbol,action,verbosename)
+ addspecifier(symbol)
+ variants[symbol]=action
+ if verbosename then
+ variants[verbosename]=action
+ end
+end
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size=size or 655360
+ if not lookup or lookup=="" then
+ lookup=definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification,lookup,name,sub,method,detail)
+ end
+ local t={
+ lookup=lookup,
+ specification=specification,
+ size=size,
+ name=name,
+ sub=sub,
+ method=method,
+ detail=detail,
+ resolved="",
+ forced="",
+ features={},
+ }
+ return t
+end
+definers.makespecification=makespecification
+function definers.analyze(specification,size)
+ local lookup,name,sub,method,detail=getspecification(specification or "")
+ return makespecification(specification,lookup,name,sub,method,detail,size)
+end
+definers.resolvers=definers.resolvers or {}
+local resolvers=definers.resolvers
+function resolvers.file(specification)
+ local name=resolvefile(specification.name)
+ local suffix=lower(suffixonly(name))
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.forcedname=name
+ specification.name=removesuffix(name)
+ else
+ specification.name=name
+ end
+end
+function resolvers.name(specification)
+ local resolve=fonts.names.resolve
+ if resolve then
+ local resolved,sub=resolve(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ local suffix=lower(suffixonly(resolved))
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
+ else
+ specification.name=resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
+end
+function resolvers.spec(specification)
+ local resolvespec=fonts.names.resolvespec
+ if resolvespec then
+ local resolved,sub=resolvespec(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.forced=lower(suffixonly(resolved))
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
+end
+function definers.resolve(specification)
+ if not specification.resolved or specification.resolved=="" then
+ local r=resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced=="" then
+ specification.forced=nil
+ specification.forcedname=nil
+ end
+ specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
+ if specification.sub and specification.sub~="" then
+ specification.hash=specification.sub..' @ '..specification.hash
+ end
+ return specification
+end
+function definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ local properties=tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname=format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+local function checkembedding(tfmdata)
+ local properties=tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding="full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding="no"
+ else
+ embedding="subset"
+ end
+ if properties then
+ properties.embedding=embedding
+ else
+ tfmdata.properties={ embedding=embedding }
+ end
+ tfmdata.embedding=embedding
+end
+function definers.loadfont(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=loadedfonts[hash]
+ if not tfmdata then
+ local forced=specification.forced or ""
+ if forced~="" then
+ local reader=readers[lower(forced)]
+ tfmdata=reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence=readers.sequence
+ for s=1,#sequence do
+ local reader=sequence[s]
+ if readers[reader] then
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata=readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename=nil
+ end
+ end
+ end
+ end
+ if tfmdata then
+ tfmdata=definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata)
+ loadedfonts[hash]=tfmdata
+ designsizes[specification.hash]=tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
+function constructors.checkvirtualids()
+end
+function constructors.readanddefine(name,size)
+ local specification=definers.analyze(name,size)
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local id=definers.registered(hash)
+ if not id then
+ local tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash=hash
+ constructors.checkvirtualids(tfmdata)
+ id=font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id=0
+ end
+ end
+ return fontdata[id],id
+end
+function definers.current()
+ return lastdefined
+end
+function definers.registered(hash)
+ local id=internalized[hash]
+ return id,id and fontdata[id]
+end
+function definers.register(tfmdata,id)
+ if tfmdata and id then
+ local hash=tfmdata.properties.hash
+ if not hash then
+ report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
+ elseif not internalized[hash] then
+ internalized[hash]=id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id]=tfmdata
+ end
+ end
+end
+function definers.read(specification,size,id)
+ statistics.starttiming(fonts)
+ if type(specification)=="string" then
+ specification=definers.analyze(specification,size)
+ end
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=definers.registered(hash)
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
+ end
+ else
+ tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash=hash
+ if id then
+ definers.register(tfmdata,id)
+ end
+ else
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined=tfmdata or id
+ if not tfmdata then
+ report_defining("unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata)=="table" then
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes,
+ properties.encodingname,properties.fullname,file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+function font.getfont(id)
+ return fontdata[id]
+end
+callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)")
+
+end -- closure
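-- Editor's sketch (not part of the patch): definers.registersplit above hooks
-- a specifier symbol into the splitter so that a variant handler can adjust
-- the specification before definers.read resolves and loads the font. A
-- hypothetical "@" handler that simply forces file lookup (symbol and verbose
-- name are made up for illustration):
fonts.definers.registersplit("@",function(specification)
 specification.lookup="file"
 return specification
end,"force file lookup")
-- definers.read then routes any request whose method is "@" through this
-- handler before calling definers.resolve and definers.loadfont.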
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-def']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.constructors.namemode="specification"
+function fonts.definers.getspecification(str)
+ return "",str,"",":",str
+end
+local list={}
+local function issome () list.lookup='name' end
+local function isfile () list.lookup='file' end
+local function isname () list.lookup='name' end
+local function thename(s) list.name=s end
+local function issub (v) list.sub=v end
+local function iscrap (s) list.crap=string.lower(s) end
+local function iskey (k,v) list[k]=v end
+local function istrue (s) list[s]=true end
+local function isfalse(s) list[s]=false end
+local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C
+local spaces=P(" ")^0
+local namespec=(1-S("/:("))^0
+local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces
+local filename_1=P("file:")/isfile*(namespec/thename)
+local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]")
+local fontname_1=P("name:")/isname*(namespec/thename)
+local fontname_2=P(true)/issome*(namespec/thename)
+local sometext=(R("az","AZ","09")+S("+-."))^1
+local truevalue=P("+")*spaces*(sometext/istrue)
+local falsevalue=P("-")*spaces*(sometext/isfalse)
+local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey
+local somevalue=sometext/istrue
+local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")")
+local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces
+local options=P(":")*spaces*(P(";")^0*option)^0
+local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0
+local function colonized(specification)
+ list={}
+ lpeg.match(pattern,specification.specification)
+ list.crap=nil
+ if list.name then
+ specification.name=list.name
+ list.name=nil
+ end
+ if list.lookup then
+ specification.lookup=list.lookup
+ list.lookup=nil
+ end
+ if list.sub then
+ specification.sub=list.sub
+ list.sub=nil
+ end
+ specification.features.normal=fonts.handlers.otf.features.normalize(list)
+ return specification
+end
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("",colonized,"more cryptic")
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=string.gsub(lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+
+end -- closure
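-- Editor's sketch (not part of the patch): with the ":" and "" splits
-- registered above, a request string is handled in two stages. A hypothetical
-- example of what ends up in the specification table:
local spec=fonts.definers.analyze("file:lmroman10-regular:+liga;script=latn",10*65536)
-- spec.lookup=="file", spec.name=="lmroman10-regular", spec.method==":";
-- definers.read then hands spec to the colonized handler, which fills
-- spec.features.normal (roughly { liga=true, script="latn" } after
-- normalization) before the font is resolved and loaded.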
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-ext']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local otffeatures=fonts.constructors.newfeatures("otf")
+local function initializeitlc(tfmdata,value)
+ if value then
+ local parameters=tfmdata.parameters
+ local italicangle=parameters.italicangle
+ if italicangle and italicangle~=0 then
+ local properties=tfmdata.properties
+ local factor=tonumber(value) or 1
+ properties.hasitalics=true
+ properties.autoitalicamount=factor*(parameters.uwidth or 40)/2
+ end
+ end
+end
+otffeatures.register {
+ name="itlc",
+ description="italic correction",
+ initializers={
+ base=initializeitlc,
+ node=initializeitlc,
+ }
+}
+local function initializeslant(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>1 then
+ value=1
+ elseif value<-1 then
+ value=-1
+ end
+ tfmdata.parameters.slantfactor=value
+end
+otffeatures.register {
+ name="slant",
+ description="slant glyphs",
+ initializers={
+ base=initializeslant,
+ node=initializeslant,
+ }
+}
+local function initializeextend(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>10 then
+ value=10
+ elseif value<-10 then
+ value=-10
+ end
+ tfmdata.parameters.extendfactor=value
+end
+otffeatures.register {
+ name="extend",
+ description="scale glyphs horizontally",
+ initializers={
+ base=initializeextend,
+ node=initializeextend,
+ }
+}
+fonts.protrusions=fonts.protrusions or {}
+fonts.protrusions.setups=fonts.protrusions.setups or {}
+local setups=fonts.protrusions.setups
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1
+ local emwidth=tfmdata.parameters.quad
+ tfmdata.parameters.protrusion={
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v,pl,pr=setup[i],nil,nil
+ if v then
+ pl,pr=v[1],v[2]
+ end
+ if pl and pl~=0 then chr.left_protruding=left*pl*factor end
+ if pr and pr~=0 then chr.right_protruding=right*pr*factor end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="protrusion",
+ description="shift characters into the left and or right margin",
+ initializers={
+ base=initializeprotrusion,
+ node=initializeprotrusion,
+ }
+}
+fonts.expansions=fonts.expansions or {}
+fonts.expansions.setups=fonts.expansions.setups or {}
+local setups=fonts.expansions.setups
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor=setup.factor or 1
+ tfmdata.parameters.expansion={
+ stretch=10*(setup.stretch or 0),
+ shrink=10*(setup.shrink or 0),
+ step=10*(setup.step or 0),
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v=setup[i]
+ if v and v~=0 then
+ chr.expansion_factor=v*factor
+ else
+ chr.expansion_factor=factor
+ end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="expansion",
+ description="apply hz optimization",
+ initializers={
+ base=initializeexpansion,
+ node=initializeexpansion,
+ }
+}
+function fonts.loggers.onetimemessage() end
+local byte=string.byte
+fonts.expansions.setups['default']={
+ stretch=2,shrink=2,step=.5,factor=1,
+ [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7,
+ [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7,
+ [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7,
+ [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7,
+ [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7,
+ [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7,
+ [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7,
+ [byte('w')]=0.7,[byte('z')]=0.7,
+ [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7,
+}
+fonts.protrusions.setups['default']={
+ factor=1,left=1,right=1,
+ [0x002C]={ 0,1 },
+ [0x002E]={ 0,1 },
+ [0x003A]={ 0,1 },
+ [0x003B]={ 0,1 },
+ [0x002D]={ 0,1 },
+ [0x2013]={ 0,0.50 },
+ [0x2014]={ 0,0.33 },
+ [0x3001]={ 0,1 },
+ [0x3002]={ 0,1 },
+ [0x060C]={ 0,1 },
+ [0x061B]={ 0,1 },
+ [0x06D4]={ 0,1 },
+}
+fonts.handlers.otf.features.normalize=function(t)
+ if t.rand then
+ t.rand="random"
+ end
+ return t
+end
+function fonts.helpers.nametoslot(name)
+ local t=type(name)
+ if t=="string" then
+ local tfmdata=fonts.hashes.identifiers[currentfont()]
+ local shared=tfmdata and tfmdata.shared
+ local fntdata=shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t=="number" then
+ return name

+ end
+end
+fonts.encodings=fonts.encodings or {}
+local reencodings={}
+fonts.encodings.reencodings=reencodings
+local function specialreencode(tfmdata,value)
+ local encoding=value and reencodings[value]
+ if encoding then
+ local temp={}
+ local char=tfmdata.characters
+ for k,v in next,encoding do
+ temp[k]=char[v]
+ end
+ for k,v in next,temp do
+ char[k]=temp[k]
+ end
+ return string.format("reencoded:%s",value)
+ end
+end
+local function reencode(tfmdata,value)
+ tfmdata.postprocessors=tfmdata.postprocessors or {}
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
+end
+otffeatures.register {
+ name="reencode",
+ description="reencode characters",
+ manipulators={
+ base=reencode,
+ node=reencode,
+ }
+}
+
+end -- closure
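-- Editor's sketch (not part of the patch): the protrusion and expansion
-- features above look their value up in fonts.protrusions.setups and
-- fonts.expansions.setups (see the 'default' tables above). A hypothetical
-- extra setup, registered the same way:
fonts.protrusions.setups["quotesonly"]={
 factor=1,left=1,right=1,
 [0x2018]={ 1,0 }, -- left single quote protrudes into the left margin
 [0x2019]={ 0,1 }, -- right single quote protrudes into the right margin
}
-- a font request could then ask for protrusion=quotesonly.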
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-cbk']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local nodes=nodes
+local traverse_id=node.traverse_id
+local glyph_code=nodes.nodecodes.glyph
+local disc_code=nodes.nodecodes.disc
+local ligaturing=node.ligaturing
+local kerning=node.kerning
+local basepass=true
+local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning=nil end
+local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning=nil end
+function node.ligaturing(...)
+ if basepass and l_warning then
+ l_warning()
+ end
+ return ligaturing(...)
+end
+function node.kerning(...)
+ if basepass and k_warning then
+ k_warning()
+ end
+ return kerning(...)
+end
+function nodes.handlers.setbasepass(v)
+ basepass=v
+end
+function nodes.handlers.nodepass(head)
+ local fontdata=fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts={}
+ local basefonts={}
+ local prevfont=nil
+ local basefont=nil
+ for n in traverse_id(glyph_code,head) do
+ local font=n.font
+ if font~=prevfont then
+ if basefont then
+ basefont[2]=n.prev
+ end
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ elseif basepass then
+ basefont={ n,nil }
+ basefonts[#basefonts+1]=basefont
+ end
+ end
+ end
+ end
+ end
+ end
+ for d in traverse_id(disc_code,head) do
+ local r=d.replace
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if next(usedfonts) then
+ for font,processors in next,usedfonts do
+ for i=1,#processors do
+ head=processors[i](head,font,0) or head
+ end
+ end
+ end
+ if basepass and #basefonts>0 then
+ for i=1,#basefonts do
+ local range=basefonts[i]
+ local start=range[1]
+ local stop=range[2]
+ if stop then
+ start,stop=ligaturing(start,stop)
+ start,stop=kerning(start,stop)
+ elseif start then
+ start=ligaturing(start)
+ start=kerning(start)
+ end
+ end
+ end
+ return head,true
+ else
+ return head,false
+ end
+end
+function nodes.handlers.basepass(head)
+ if not basepass then
+ head=ligaturing(head)
+ head=kerning(head)
+ end
+ return head,true
+end
+local nodepass=nodes.handlers.nodepass
+local basepass=nodes.handlers.basepass
+local injectpass=nodes.injections.handler
+local protectpass=nodes.handlers.protectglyphs
+function nodes.simple_font_handler(head)
+ if head then
+ head=nodepass(head)
+ head=injectpass(head)
+ head=basepass(head)
+ protectpass(head)
+ return head,true
+ else
+ return head,false
+ end
+end
+
+end -- closure
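-- Editor's sketch (not part of the patch): in a plain/generic setup the
-- simple_font_handler defined above is normally attached to the node list
-- callbacks; a minimal hookup, assuming no callback manager is in use:
callback.register("pre_linebreak_filter",nodes.simple_font_handler)
callback.register("hpack_filter",nodes.simple_font_handler)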
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-gen.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics-gen.lua
index c19a49af39f..c4d6536046a 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-gen.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics-gen.lua
@@ -15,8 +15,13 @@ local dummyfunction = function()
end
local dummyreporter = function(c)
- return function(...)
- (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...))
+ return function(f,...)
+ local r = texio.reporter or texio.write_nl
+ if f then
+ r(c .. " : " .. string.formatters(f,...))
+ else
+ r("")
+ end
end
end
@@ -351,7 +356,12 @@ end
--
function table.setmetatableindex(t,f)
+ if type(t) ~= "table" then
+ f = f or t
+ t = { }
+ end
setmetatable(t,{ __index = f })
+ return t
end
-- helper for plain:
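-- Editor's sketch (not part of the patch): with the change above,
-- table.setmetatableindex also accepts a lone function and then returns a
-- fresh table carrying that __index, so both call forms now work:
local a = table.setmetatableindex({ }, function(t,k) return 0 end)
local b = table.setmetatableindex(function(t,k) return 0 end) -- new form
print(a.anything, b.anything) -- both print 0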
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-nod.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics-nod.lua
index 373dab5a8c7..39400a3d001 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-nod.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics-nod.lua
@@ -45,7 +45,7 @@ attributes.private = attributes.private or function(name)
return number
end
--- Nodes:
+-- Nodes (a subset of context so that we don't get too much unused code):
nodes = { }
nodes.pool = { }
@@ -56,6 +56,9 @@ local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gs
local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" }
+for i=0,#glyphcodes do glyphcodes[glyphcodes[i]] = i end
+for i=0,#disccodes do disccodes [disccodes [i]] = i end
+
nodes.nodecodes = nodecodes
nodes.whatcodes = whatcodes
nodes.whatsitcodes = whatcodes
@@ -105,10 +108,8 @@ function nodes.pool.kern(k)
return n
end
--- experimental
-
-local getfield = node.getfield or function(n,tag) return n[tag] end
-local setfield = node.setfield or function(n,tag,value) n[tag] = value end
+local getfield = node.getfield
+local setfield = node.setfield
nodes.getfield = getfield
nodes.setfield = setfield
@@ -116,17 +117,6 @@ nodes.setfield = setfield
nodes.getattr = getfield
nodes.setattr = setfield
-if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end
-if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
-if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
-if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
-if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
-if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
-if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
-
-function nodes.tonut (n) return n end
-function nodes.tonode(n) return n end
-
-- being lazy ... just copy a bunch ... not all needed in generic but we assume
-- nodes to be kind of private anyway
@@ -167,12 +157,95 @@ nodes.unset_attribute = node.unset_attribute
nodes.protect_glyphs = node.protect_glyphs
nodes.unprotect_glyphs = node.unprotect_glyphs
-nodes.kerning = node.kerning
-nodes.ligaturing = node.ligaturing
+-----.kerning = node.kerning
+-----.ligaturing = node.ligaturing
nodes.mlist_to_hlist = node.mlist_to_hlist
-- in generic code, at least for some time, we stay nodes, while in context
-- we can go nuts (e.g. experimental); this split permits us to keep code
-- used elsewhere stable but at the same time play around in context
-nodes.nuts = nodes
+local direct = node.direct
+local nuts = { }
+nodes.nuts = nuts
+
+local tonode = direct.tonode
+local tonut = direct.todirect
+
+nodes.tonode = tonode
+nodes.tonut = tonut
+
+nuts.tonode = tonode
+nuts.tonut = tonut
+
+
+local getfield = direct.getfield
+local setfield = direct.setfield
+
+nuts.getfield = getfield
+nuts.setfield = setfield
+nuts.getnext = direct.getnext
+nuts.getprev = direct.getprev
+nuts.getid = direct.getid
+nuts.getattr = getfield
+nuts.setattr = setfield
+nuts.getfont = direct.getfont
+nuts.getsubtype = direct.getsubtype
+nuts.getchar = direct.getchar
+
+nuts.insert_before = direct.insert_before
+nuts.insert_after = direct.insert_after
+nuts.delete = direct.delete
+nuts.copy = direct.copy
+nuts.copy_list = direct.copy_list
+nuts.tail = direct.tail
+nuts.flush_list = direct.flush_list
+nuts.free = direct.free
+nuts.remove = direct.remove
+nuts.is_node = direct.is_node
+nuts.end_of_math = direct.end_of_math
+nuts.traverse = direct.traverse
+nuts.traverse_id = direct.traverse_id
+
+nuts.getprop = nuts.getattr
+nuts.setprop = nuts.setattr
+
+local new_nut = direct.new
+nuts.new = new_nut
+nuts.pool = { }
+
+function nuts.pool.kern(k)
+ local n = new_nut("kern",1)
+ setfield(n,"kern",k)
+ return n
+end
+
+-- properties as used in the (new) injector:
+
+local propertydata = direct.get_properties_table()
+nodes.properties = { data = propertydata }
+
+direct.set_properties_mode(true,true) -- needed for injection
+
+function direct.set_properties_mode() end -- we really need the set modes
+
+nuts.getprop = function(n,k)
+ local p = propertydata[n]
+ if p then
+ return p[k]
+ end
+end
+
+nuts.setprop = function(n,k,v)
+ if v then
+ local p = propertydata[n]
+ if p then
+ p[k] = v
+ else
+ propertydata[n] = { [k] = v }
+ end
+ end
+end
+
+nodes.setprop = nodes.setproperty
+nodes.getprop = nodes.getproperty
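-- Editor's sketch (not part of the patch): the nuts layer above mirrors the
-- node.direct API and keeps extra data in the shared properties table; a
-- minimal round trip under those assumptions (run inside LuaTeX with the
-- code above loaded):
local nuts = nodes.nuts
local g = nuts.new("glyph")
nuts.setprop(g, "injection", { kern = 100 })
assert(nuts.getprop(g, "injection").kern == 100)
nuts.free(g)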
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics.tex
new file mode 100644
index 00000000000..1180c68e6c5
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-basics.tex
@@ -0,0 +1,93 @@
+%D \module
+%D [ file=luatex-basics,
+%D version=2009.12.01,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Attribute Allocation,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D As soon as we feel the need this file will contain an extension
+%D to the standard plain register allocation. For the moment we stick to a
+%D rather dumb attribute allocator. We start at 256 because we don't want
+%D any interference with the attributes used in the font handler.
+
+\ifx\newattribute\undefined \else \endinput \fi
+
+\newcount \lastallocatedattribute \lastallocatedattribute=255
+
+\def\newattribute#1%
+ {\global\advance\lastallocatedattribute 1
+ \attributedef#1\lastallocatedattribute}
+
+% maybe we will have luatex-basics.lua some day, for instance when more
+% (pdf) primitives have moved to macros
+
+\directlua {
+
+ gadgets = gadgets or { } % reserved namespace
+
+ gadgets.functions = { }
+ local registered = {}
+
+ function gadgets.functions.reserve()
+ local numb = newtoken.scan_int()
+ local name = newtoken.scan_string()
+ local okay = string.gsub(name,"[\string\\ ]","")
+ registered[okay] = numb
+ texio.write_nl("reserving lua function '"..okay.."' with number "..numb)
+ end
+
+ function gadgets.functions.register(name,f)
+ local okay = string.gsub(name,"[\string\\ ]","")
+ local numb = registered[okay]
+ if numb then
+ texio.write_nl("registering lua function '"..okay.."' with number "..numb)
+ lua.get_functions_table()[numb] = f
+ else
+ texio.write_nl("lua function '"..okay.."' is not reserved")
+ end
+ end
+
+}
+
+\newcount\lastallocatedluafunction
+
+\def\newluafunction#1%
+ {\ifdefined#1\else
+ \global\advance\lastallocatedluafunction 1
+ \global\chardef#1\lastallocatedluafunction
+ \directlua{gadgets.functions.reserve()}#1{\detokenize{#1}}%
+ \fi}
+
+% an example of usage (if we ever support it, it will go to the plain gadgets module):
+%
+% \directlua {
+%
+% local cct = nil
+% local chr = nil
+%
+% gadgets.functions.register("UcharcatLuaOne",function()
+% chr = newtoken.scan_int()
+% cct = tex.getcatcode(chr)
+% tex.setcatcode(chr,newtoken.scan_int())
+% tex.sprint(unicode.utf8.char(chr))
+% end)
+%
+% gadgets.functions.register("UcharcatLuaTwo",function()
+% tex.setcatcode(chr,cct)
+% end)
+%
+% }
+%
+% \def\Ucharcat
+% {\expandafter\expandafter\expandafter\luafunction
+% \expandafter\expandafter\expandafter\UcharcatLuaTwo
+% \luafunction\UcharcatLuaOne}
+%
+% A:\the\catcode65:\Ucharcat 65 11:A:\the\catcode65\par
+% A:\the\catcode65:\Ucharcat 65 5:A:\the\catcode65\par
+% A:\the\catcode65:\Ucharcat 65 11:A:\the\catcode65\par
+
+
+\endinput
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-data-con.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-data-con.lua
new file mode 100644
index 00000000000..240538df2db
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-data-con.lua
@@ -0,0 +1,138 @@
+if not modules then modules = { } end modules ['data-con'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, lower, gsub = string.format, string.lower, string.gsub
+
+local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
+local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
+local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
+
+--[[ldx--
+<p>Once we found ourselves defining similar cache constructs
+several times, containers were introduced. Containers are used
+to collect tables in memory and reuse them when possible based
+on (unique) hashes (to be provided by the calling function).</p>
+
+<p>Caching to disk is disabled by default. Version numbers are
+stored in the saved table which makes it possible to change the
+table structures without bothering about the disk cache.</p>
+
+<p>Examples of usage can be found in the font related code.</p>
+--ldx]]--
+
+containers = containers or { }
+local containers = containers
+containers.usecache = true
+
+local report_containers = logs.reporter("resolvers","containers")
+
+local allocated = { }
+
+local mt = {
+ __index = function(t,k)
+ if k == "writable" then
+ local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable = writable
+ return writable
+ elseif k == "readables" then
+ local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables = readables
+ return readables
+ end
+ end,
+ __storage__ = true
+}
+
+function containers.define(category, subcategory, version, enabled)
+ if category and subcategory then
+ local c = allocated[category]
+ if not c then
+ c = { }
+ allocated[category] = c
+ end
+ local s = c[subcategory]
+ if not s then
+ s = {
+ category = category,
+ subcategory = subcategory,
+ storage = { },
+ enabled = enabled,
+ version = version or math.pi, -- after all, this is TeX
+ trace = false,
+ -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
+ -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
+ }
+ setmetatable(s,mt)
+ c[subcategory] = s
+ end
+ return s
+ end
+end
+
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable, name)
+end
+
+function containers.is_valid(container,name)
+ if name and name ~= "" then
+ local storage = container.storage[name]
+ return storage and storage.cache_version == container.version
+ else
+ return false
+ end
+end
+
+function containers.read(container,name)
+ local storage = container.storage
+ local stored = storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored = caches.loaddata(container.readables,name)
+ if stored and stored.cache_version == container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored = nil
+ end
+ storage[name] = stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+
+function containers.write(container, name, data)
+ if data then
+ data.cache_version = container.version
+ if container.enabled and caches then
+ local unique, shared = data.unique, data.shared
+ data.unique, data.shared = nil, nil
+ caches.savedata(container.writable, name, data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique, data.shared = unique, shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name] = data
+ end
+ return data
+end
+
+function containers.content(container,name)
+ return container.storage[name]
+end
+
+function containers.cleanname(name)
+ -- return (gsub(lower(name),"[^%w]+","-"))
+ return (gsub(lower(name),"[^%w\128-\255]+","-")) -- more utf friendly
+end
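-- Editor's sketch (not part of the patch): a typical round trip through the
-- container cache defined above, with made-up category and entry names (disk
-- caching only kicks in when a caches implementation is present):
local cache = containers.define("fonts", "demo", 1.001, true)
local data  = containers.read(cache, "somefont")
if not data then
    data = { glyphs = { } } -- (re)build the expensive table
    data = containers.write(cache, "somefont", data)
end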
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afk.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afk.lua
new file mode 100644
index 00000000000..8b65b063184
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afk.lua
@@ -0,0 +1,200 @@
+if not modules then modules = { } end modules ['font-afk'] = {
+ version = 1.001,
+ comment = "companion to font-afm.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ dataonly = true,
+}
+
+--[[ldx--
+<p>For ligatures, only characters with a code smaller than 128 make sense,
+anything larger is encoding dependent. An interesting complication is that a
+character can be in an encoding twice but is hashed once.</p>
+--ldx]]--
+
+local allocate = utilities.storage.allocate
+
+fonts.handlers.afm.helpdata = {
+ ligatures = allocate { -- okay, nowadays we could parse the name but type 1 fonts
+ ['f'] = { -- don't have that many ligatures anyway
+ { 'f', 'ff' },
+ { 'i', 'fi' },
+ { 'l', 'fl' },
+ },
+ ['ff'] = {
+ { 'i', 'ffi' }
+ },
+ ['fi'] = {
+ { 'i', 'fii' }
+ },
+ ['fl'] = {
+ { 'i', 'fli' }
+ },
+ ['s'] = {
+ { 't', 'st' }
+ },
+ ['i'] = {
+ { 'j', 'ij' }
+ },
+ },
+ texligatures = allocate {
+ -- ['space'] = {
+ -- { 'L', 'Lslash' },
+ -- { 'l', 'lslash' }
+ -- },
+ -- ['question'] = {
+ -- { 'quoteleft', 'questiondown' }
+ -- },
+ -- ['exclam'] = {
+ -- { 'quoteleft', 'exclamdown' }
+ -- },
+ ['quoteleft'] = {
+ { 'quoteleft', 'quotedblleft' }
+ },
+ ['quoteright'] = {
+ { 'quoteright', 'quotedblright' }
+ },
+ ['hyphen'] = {
+ { 'hyphen', 'endash' }
+ },
+ ['endash'] = {
+ { 'hyphen', 'emdash' }
+ }
+ },
+ leftkerned = allocate {
+ AEligature = "A", aeligature = "a",
+ OEligature = "O", oeligature = "o",
+ IJligature = "I", ijligature = "i",
+ AE = "A", ae = "a",
+ OE = "O", oe = "o",
+ IJ = "I", ij = "i",
+ Ssharp = "S", ssharp = "s",
+ },
+ rightkerned = allocate {
+ AEligature = "E", aeligature = "e",
+ OEligature = "E", oeligature = "e",
+ IJligature = "J", ijligature = "j",
+ AE = "E", ae = "e",
+ OE = "E", oe = "e",
+ IJ = "J", ij = "j",
+ Ssharp = "S", ssharp = "s",
+ },
+ bothkerned = allocate {
+ Acircumflex = "A", acircumflex = "a",
+ Ccircumflex = "C", ccircumflex = "c",
+ Ecircumflex = "E", ecircumflex = "e",
+ Gcircumflex = "G", gcircumflex = "g",
+ Hcircumflex = "H", hcircumflex = "h",
+ Icircumflex = "I", icircumflex = "i",
+ Jcircumflex = "J", jcircumflex = "j",
+ Ocircumflex = "O", ocircumflex = "o",
+ Scircumflex = "S", scircumflex = "s",
+ Ucircumflex = "U", ucircumflex = "u",
+ Wcircumflex = "W", wcircumflex = "w",
+ Ycircumflex = "Y", ycircumflex = "y",
+
+ Agrave = "A", agrave = "a",
+ Egrave = "E", egrave = "e",
+ Igrave = "I", igrave = "i",
+ Ograve = "O", ograve = "o",
+ Ugrave = "U", ugrave = "u",
+ Ygrave = "Y", ygrave = "y",
+
+ Atilde = "A", atilde = "a",
+ Itilde = "I", itilde = "i",
+ Otilde = "O", otilde = "o",
+ Utilde = "U", utilde = "u",
+ Ntilde = "N", ntilde = "n",
+
+ Adiaeresis = "A", adiaeresis = "a", Adieresis = "A", adieresis = "a",
+ Ediaeresis = "E", ediaeresis = "e", Edieresis = "E", edieresis = "e",
+ Idiaeresis = "I", idiaeresis = "i", Idieresis = "I", idieresis = "i",
+ Odiaeresis = "O", odiaeresis = "o", Odieresis = "O", odieresis = "o",
+ Udiaeresis = "U", udiaeresis = "u", Udieresis = "U", udieresis = "u",
+ Ydiaeresis = "Y", ydiaeresis = "y", Ydieresis = "Y", ydieresis = "y",
+
+ Aacute = "A", aacute = "a",
+ Cacute = "C", cacute = "c",
+ Eacute = "E", eacute = "e",
+ Iacute = "I", iacute = "i",
+ Lacute = "L", lacute = "l",
+ Nacute = "N", nacute = "n",
+ Oacute = "O", oacute = "o",
+ Racute = "R", racute = "r",
+ Sacute = "S", sacute = "s",
+ Uacute = "U", uacute = "u",
+ Yacute = "Y", yacute = "y",
+ Zacute = "Z", zacute = "z",
+
+ Dstroke = "D", dstroke = "d",
+ Hstroke = "H", hstroke = "h",
+ Tstroke = "T", tstroke = "t",
+
+ Cdotaccent = "C", cdotaccent = "c",
+ Edotaccent = "E", edotaccent = "e",
+ Gdotaccent = "G", gdotaccent = "g",
+ Idotaccent = "I", idotaccent = "i",
+ Zdotaccent = "Z", zdotaccent = "z",
+
+ Amacron = "A", amacron = "a",
+ Emacron = "E", emacron = "e",
+ Imacron = "I", imacron = "i",
+ Omacron = "O", omacron = "o",
+ Umacron = "U", umacron = "u",
+
+ Ccedilla = "C", ccedilla = "c",
+ Kcedilla = "K", kcedilla = "k",
+ Lcedilla = "L", lcedilla = "l",
+ Ncedilla = "N", ncedilla = "n",
+ Rcedilla = "R", rcedilla = "r",
+ Scedilla = "S", scedilla = "s",
+ Tcedilla = "T", tcedilla = "t",
+
+ Ohungarumlaut = "O", ohungarumlaut = "o",
+ Uhungarumlaut = "U", uhungarumlaut = "u",
+
+ Aogonek = "A", aogonek = "a",
+ Eogonek = "E", eogonek = "e",
+ Iogonek = "I", iogonek = "i",
+ Uogonek = "U", uogonek = "u",
+
+ Aring = "A", aring = "a",
+ Uring = "U", uring = "u",
+
+ Abreve = "A", abreve = "a",
+ Ebreve = "E", ebreve = "e",
+ Gbreve = "G", gbreve = "g",
+ Ibreve = "I", ibreve = "i",
+ Obreve = "O", obreve = "o",
+ Ubreve = "U", ubreve = "u",
+
+ Ccaron = "C", ccaron = "c",
+ Dcaron = "D", dcaron = "d",
+ Ecaron = "E", ecaron = "e",
+ Lcaron = "L", lcaron = "l",
+ Ncaron = "N", ncaron = "n",
+ Rcaron = "R", rcaron = "r",
+ Scaron = "S", scaron = "s",
+ Tcaron = "T", tcaron = "t",
+ Zcaron = "Z", zcaron = "z",
+
+ dotlessI = "I", dotlessi = "i",
+ dotlessJ = "J", dotlessj = "j",
+
+ AEligature = "AE", aeligature = "ae", AE = "AE", ae = "ae",
+ OEligature = "OE", oeligature = "oe", OE = "OE", oe = "oe",
+ IJligature = "IJ", ijligature = "ij", IJ = "IJ", ij = "ij",
+
+ Lstroke = "L", lstroke = "l", Lslash = "L", lslash = "l",
+ Ostroke = "O", ostroke = "o", Oslash = "O", oslash = "o",
+
+ Ssharp = "SS", ssharp = "ss",
+
+ Aumlaut = "A", aumlaut = "a",
+ Eumlaut = "E", eumlaut = "e",
+ Iumlaut = "I", iumlaut = "i",
+ Oumlaut = "O", oumlaut = "o",
+ Uumlaut = "U", uumlaut = "u",
+ }
+}
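-- Editor's sketch (not part of the patch): each entry in the "ligatures"
-- helpdata above maps a first glyph name to { second, result } pairs; a tiny
-- lookup helper written against that shape:
local function ligature_of(first, second)
    local entry = fonts.handlers.afm.helpdata.ligatures[first]
    if entry then
        for i=1,#entry do
            if entry[i][1] == second then
                return entry[i][2]
            end
        end
    end
end
-- ligature_of("f","i") --> "fi"; ligature_of("hyphen","hyphen") is nil here
-- because the hyphen pairs live in the texligatures table instead.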
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afm.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afm.lua
new file mode 100644
index 00000000000..329639b85ca
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-afm.lua
@@ -0,0 +1,1047 @@
+if not modules then modules = { } end modules ['font-afm'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>Some code may look a bit obscure but this has to do with the
+fact that we also use this code for testing and much code evolved
+in the transition from <l n='tfm'/> to <l n='afm'/> to <l
+n='otf'/>.</p>
+
+<p>The following code still has traces of intermediate font support
+where we handled font encodings. Eventually font encoding goes
+away.</p>
+
+<p>The embedding of a font involves creating temporary files and,
+depending on your system setup, that can fail. It took more than a
+day to figure out why embedding sometimes failed in mingw luatex:
+running on a real path like c:\... failed, while running on, say,
+e:\... (a link) worked well. The native windows binaries don't have
+this issue.</p>
+
+--ldx]]--
+
+local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
+
+local next, type, tonumber = next, type, tonumber
+local format, match, gmatch, lower, gsub, strip = string.format, string.match, string.gmatch, string.lower, string.gsub, string.strip
+local abs = math.abs
+local P, S, C, R, lpegmatch, patterns = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.match, lpeg.patterns
+local derivetable = table.derive
+
+local trace_features = false trackers.register("afm.features", function(v) trace_features = v end)
+local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end)
+local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end)
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+
+local report_afm = logs.reporter("fonts","afm loading")
+
+local setmetatableindex = table.setmetatableindex
+
+local findbinfile = resolvers.findbinfile
+
+local definers = fonts.definers
+local readers = fonts.readers
+local constructors = fonts.constructors
+
+local fontloader = fontloader
+local font_to_table = fontloader.to_table
+local open_font = fontloader.open
+local close_font = fontloader.close
+
+local afm = constructors.newhandler("afm")
+local pfb = constructors.newhandler("pfb")
+
+local afmfeatures = constructors.newfeatures("afm")
+local registerafmfeature = afmfeatures.register
+
+afm.version = 1.500 -- incrementing this number one up will force a re-cache
+afm.cache = containers.define("fonts", "afm", afm.version, true)
+afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*)
+
+afm.helpdata = { } -- set later on so no local for this
+afm.syncspace = true -- when true, nicer stretch values
+afm.addligatures = true -- best leave this set to true
+afm.addtexligatures = true -- best leave this set to true
+afm.addkerns = true -- best leave this set to true
+
+local overloads = fonts.mappings.overloads
+
+local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
+
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode = lower(value)
+ end
+end
+
+registerafmfeature {
+ name = "mode",
+ description = "mode",
+ initializers = {
+ base = setmode,
+ node = setmode,
+ }
+}
+
+--[[ldx--
+<p>We start with the basic reader which we give a name similar to the
+built in <l n='tfm'/> and <l n='otf'/> reader.</p>
+--ldx]]--
+
+--~ Comment FONTIDENTIFIER LMMATHSYMBOLS10
+--~ Comment CODINGSCHEME TEX MATH SYMBOLS
+--~ Comment DESIGNSIZE 10.0 pt
+--~ Comment CHECKSUM O 4261307036
+--~ Comment SPACE 0 plus 0 minus 0
+--~ Comment QUAD 1000
+--~ Comment EXTRASPACE 0
+--~ Comment NUM 676.508 393.732 443.731
+--~ Comment DENOM 685.951 344.841
+--~ Comment SUP 412.892 362.892 288.889
+--~ Comment SUB 150 247.217
+--~ Comment SUPDROP 386.108
+--~ Comment SUBDROP 50
+--~ Comment DELIM 2390 1010
+--~ Comment AXISHEIGHT 250
+
+local comment = P("Comment")
+local spacing = patterns.spacer -- S(" \t")^1
+local lineend = patterns.newline -- S("\n\r")
+local words = C((1 - lineend)^1)
+local number = C((R("09") + S("."))^1) / tonumber * spacing^0
+local data = lpeg.Carg(1)
+
+local pattern = ( -- needs testing ... not used anyway as we no longer need math afm's
+ comment * spacing *
+ (
+ data * (
+ ("CODINGSCHEME" * spacing * words ) / function(fd,a) end +
+ ("DESIGNSIZE" * spacing * number * words ) / function(fd,a) fd[ 1] = a end +
+ ("CHECKSUM" * spacing * number * words ) / function(fd,a) fd[ 2] = a end +
+ ("SPACE" * spacing * number * "plus" * number * "minus" * number) / function(fd,a,b,c) fd[ 3], fd[ 4], fd[ 5] = a, b, c end +
+ ("QUAD" * spacing * number ) / function(fd,a) fd[ 6] = a end +
+ ("EXTRASPACE" * spacing * number ) / function(fd,a) fd[ 7] = a end +
+ ("NUM" * spacing * number * number * number ) / function(fd,a,b,c) fd[ 8], fd[ 9], fd[10] = a, b, c end +
+ ("DENOM" * spacing * number * number ) / function(fd,a,b ) fd[11], fd[12] = a, b end +
+ ("SUP" * spacing * number * number * number ) / function(fd,a,b,c) fd[13], fd[14], fd[15] = a, b, c end +
+ ("SUB" * spacing * number * number ) / function(fd,a,b) fd[16], fd[17] = a, b end +
+ ("SUPDROP" * spacing * number ) / function(fd,a) fd[18] = a end +
+ ("SUBDROP" * spacing * number ) / function(fd,a) fd[19] = a end +
+ ("DELIM" * spacing * number * number ) / function(fd,a,b) fd[20], fd[21] = a, b end +
+ ("AXISHEIGHT" * spacing * number ) / function(fd,a) fd[22] = a end
+ )
+ + (1-lineend)^0
+ )
+ + (1-comment)^1
+)^0
+
+local function scan_comment(str)
+ local fd = { }
+ lpegmatch(pattern,str,1,fd)
+ return fd
+end
+
+-- On a rainy day I will rewrite this in lpeg ... or we can use the (slower) fontloader
+-- as it now supports afm/pfb loading but it's not too bad to have different methods
+-- for testing approaches.
+
+local keys = { }
+
+function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces
+ data.metadata.fullname = strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.monospaced = toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender = tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end
+function keys.Comment (data,line)
+ -- Comment DesignSize 12 (pts)
+ -- Comment TFM designsize: 12 (in points)
+ line = lower(line)
+ local designsize = match(line,"designsize[^%d]*(%d+)")
+ if designsize then data.metadata.designsize = tonumber(designsize) end
+end
+
+local function get_charmetrics(data,charmetrics,vector)
+ local characters = data.characters
+ local chr, ind = { }, 0
+ for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
+ if k == 'C' then
+ v = tonumber(v)
+ if v < 0 then
+ ind = ind + 1 -- ?
+ else
+ ind = v
+ end
+ chr = {
+ index = ind
+ }
+ elseif k == 'WX' then
+ chr.width = tonumber(v)
+ elseif k == 'N' then
+ characters[v] = chr
+ elseif k == 'B' then
+ local llx, lly, urx, ury = match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
+ chr.boundingbox = { tonumber(llx), tonumber(lly), tonumber(urx), tonumber(ury) }
+ elseif k == 'L' then
+ local plus, becomes = match(v,"^(.-) +(.-)$")
+ local ligatures = chr.ligatures
+ if ligatures then
+ ligatures[plus] = becomes
+ else
+ chr.ligatures = { [plus] = becomes }
+ end
+ end
+ end
+end
+
+local function get_kernpairs(data,kernpairs)
+ local characters = data.characters
+ for one, two, value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
+ local chr = characters[one]
+ if chr then
+ local kerns = chr.kerns
+ if kerns then
+ kerns[two] = tonumber(value)
+ else
+ chr.kerns = { [two] = tonumber(value) }
+ end
+ end
+ end
+end
+
+local function get_variables(data,fontmetrics)
+ for key, rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
+ local keyhandler = keys[key]
+ if keyhandler then
+ keyhandler(data,rest)
+ end
+ end
+end
+
+local function get_indexes(data,pfbname)
+ data.resources.filename = resolvers.unresolve(pfbname) -- no shortcut
+ local pfbblob = open_font(pfbname)
+ if pfbblob then
+ local characters = data.characters
+ local pfbdata = font_to_table(pfbblob)
+ if pfbdata then
+ local glyphs = pfbdata.glyphs
+ if glyphs then
+ if trace_loading then
+ report_afm("getting index data from %a",pfbname)
+ end
+ for index, glyph in next, glyphs do
+ -- for index, glyph in table.sortedhash(glyphs) do
+ local name = glyph.name
+ if name then
+ local char = characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %a has index %a",name,index)
+ end
+ char.index = index
+ end
+ end
+ end
+ elseif trace_loading then
+ report_afm("no glyph data in pfb file %a",pfbname)
+ end
+ elseif trace_loading then
+ report_afm("no data in pfb file %a",pfbname)
+ end
+ close_font(pfbblob)
+ elseif trace_loading then
+ report_afm("invalid pfb file %a",pfbname)
+ end
+end
+
+local function readafm(filename)
+ local ok, afmblob, size = resolvers.loadbinfile(filename) -- has logging
+ if ok and afmblob then
+ local data = {
+ resources = {
+ filename = resolvers.unresolve(filename),
+ version = afm.version,
+ creator = "context mkiv",
+ },
+ properties = {
+ hasitalics = false,
+ },
+ goodies = {
+ },
+ metadata = {
+ filename = file.removesuffix(file.basename(filename))
+ },
+ characters = {
+ -- a temporary store
+ },
+ descriptions = {
+ -- the final store
+ },
+ }
+ afmblob = gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics", function(charmetrics)
+ if trace_loading then
+ report_afm("loading char metrics")
+ end
+ get_charmetrics(data,charmetrics,vector)
+ return ""
+ end)
+ afmblob = gsub(afmblob,"StartKernPairs(.-)EndKernPairs", function(kernpairs)
+ if trace_loading then
+ report_afm("loading kern pairs")
+ end
+ get_kernpairs(data,kernpairs)
+ return ""
+ end)
+ afmblob = gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics", function(version,fontmetrics)
+ if trace_loading then
+ report_afm("loading variables")
+ end
+ data.afmversion = version
+ get_variables(data,fontmetrics)
+ data.fontdimens = scan_comment(fontmetrics) -- todo: all lpeg, no time now
+ return ""
+ end)
+ return data
+ else
+ if trace_loading then
+ report_afm("no valid afm file %a",filename)
+ end
+ return nil
+ end
+end
+
+--[[ldx--
+<p>We cache files. Caching is taken care of in the loader. We cheat a bit
+by adding ligatures and kern information to the afm derived data. That
+way we can set them faster when defining a font.</p>
+--ldx]]--
+
+local addkerns, addligatures, addtexligatures, unify, normalize, fixnames -- we will implement these later
+
+function afm.load(filename)
+ -- hm, for some reasons not resolved yet
+ filename = resolvers.findfile(filename,'afm') or ""
+ if filename ~= "" and not fonts.names.ignoredfile(filename) then
+ local name = file.removesuffix(file.basename(filename))
+ local data = containers.read(afm.cache,name)
+ local attr = lfs.attributes(filename)
+ local size, time = attr.size or 0, attr.modification or 0
+ --
+ local pfbfile = file.replacesuffix(name,"pfb")
+ local pfbname = resolvers.findfile(pfbfile,"pfb") or ""
+ if pfbname == "" then
+ pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize, pfbtime = 0, 0
+ if pfbname ~= "" then
+ local attr = lfs.attributes(pfbname)
+ pfbsize = attr.size or 0
+ pfbtime = attr.modification or 0
+ end
+ if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then
+ report_afm("reading %a",filename)
+ data = readafm(filename)
+ if data then
+ if pfbname ~= "" then
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %a",filename)
+ -- data.resources.filename = "unset" -- better than loading the afm file
+ end
+ report_afm("unifying %a",filename)
+ unify(data,filename)
+ if afm.addligatures then
+ report_afm("add ligatures")
+ addligatures(data)
+ end
+ if afm.addtexligatures then
+ report_afm("add tex ligatures")
+ addtexligatures(data)
+ end
+ if afm.addkerns then
+ report_afm("add extra kerns")
+ addkerns(data)
+ end
+ normalize(data)
+ fixnames(data)
+ report_afm("add tounicode data")
+ fonts.mappings.addtounicode(data,filename)
+ data.size = size
+ data.time = time
+ data.pfbsize = pfbsize
+ data.pfbtime = pfbtime
+ report_afm("saving %a in cache",name)
+ data.resources.unicodes = nil -- consistent with otf but here we save not much
+ data = containers.write(afm.cache, name, data)
+ data = containers.read(afm.cache,name)
+ end
+ if applyruntimefixes and data then
+ applyruntimefixes(filename,data)
+ end
+ end
+ return data
+ else
+ return nil
+ end
+end
+
+local uparser = fonts.mappings.makenameparser()
+
+unify = function(data, filename)
+ local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
+ local unicodes, names = { }, { }
+ local private = constructors.privateoffset
+ local descriptions = data.descriptions
+ for name, blob in next, data.characters do
+ local code = unicodevector[name] -- or characters.name_to_unicode[name]
+ if not code then
+ code = lpegmatch(uparser,name)
+ if not code then
+ code = private
+ private = private + 1
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
+ end
+ end
+ local index = blob.index
+ unicodes[name] = code
+ names[name] = index
+ blob.name = name
+ descriptions[code] = {
+ boundingbox = blob.boundingbox,
+ width = blob.width,
+ kerns = blob.kerns,
+ index = index,
+ name = name,
+ }
+ end
+ for unicode, description in next, descriptions do
+ local kerns = description.kerns
+ if kerns then
+ local krn = { }
+ for name, kern in next, kerns do
+ local unicode = unicodes[name]
+ if unicode then
+ krn[unicode] = kern
+ else
+ -- print(unicode,name)
+ end
+ end
+ description.kerns = krn
+ end
+ end
+ data.characters = nil
+ local resources = data.resources
+ local filename = resources.filename or file.removesuffix(file.basename(filename))
+ resources.filename = resolvers.unresolve(filename) -- no shortcut
+ resources.unicodes = unicodes -- name to unicode
+ resources.marks = { } -- todo
+ -- resources.names = names -- name to index
+ resources.private = private
+end
+
+normalize = function(data)
+end
+
+fixnames = function(data)
+ for k, v in next, data.descriptions do
+ local n = v.name
+ local r = overloads[n]
+ if r then
+ local name = r.name
+ if trace_indexing then
+ report_afm("renaming characters %a to %a",n,name)
+ end
+ v.name = name
+ v.unicode = r.unicode
+ end
+ end
+end
+
+
+--[[ldx--
+<p>These helpers extend the basic table with extra ligatures, texligatures
+and extra kerns. This saves quite some lookups later.</p>
+--ldx]]--
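+
+-- Editor's sketch (invented data, not the actual afm.helpdata tables): the
+-- ligature tables passed to addthem() below are expected to map the name of
+-- the first glyph to a list of { second, ligature } name pairs; addthem()
+-- records them on the first glyph's description as
+-- description.ligatures[unicode(second)] = unicode(ligature).
+--
+-- local ligatures_example = {
+--     f = { { "i", "fi" }, { "l", "fl" } },
+-- }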
+
+local addthem = function(rawdata,ligatures)
+ if ligatures then
+ local descriptions = rawdata.descriptions
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
+ -- local names = resources.names
+ for ligname, ligdata in next, ligatures do
+ local one = descriptions[unicodes[ligname]]
+ if one then
+ for _, pair in next, ligdata do
+ local two, three = unicodes[pair[1]], unicodes[pair[2]]
+ if two and three then
+ local ol = one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two] = three
+ end
+ else
+ one.ligatures = { [two] = three }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+addligatures = function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures = function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
+
+--[[ldx--
+<p>We keep the extra kerns in separate kerning tables so that we can use
+them selectively.</p>
+--ldx]]--
+
+-- This is rather old code (from the beginning when we had only tfm). If
+-- we unify the afm data (now we have names all over the place) then
+-- we can use shcodes, but that would involve much more looping. We could
+-- then get rid of the tables in char-cmp. Also, in the generic version
+-- we don't use the character database. (Ok, we can have a context specific
+-- variant).
+
+addkerns = function(rawdata) -- using shcodes is not robust here
+ local descriptions = rawdata.descriptions
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
+ local function do_it_left(what)
+ if what then
+ for unicode, description in next, descriptions do
+ local kerns = description.kerns
+ if kerns then
+ local extrakerns
+ for complex, simple in next, what do
+ complex = unicodes[complex]
+ simple = unicodes[simple]
+ if complex and simple then
+ local ks = kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex] = ks
+ else
+ extrakerns = { [complex] = ks }
+ end
+ end
+ end
+ end
+ if extrakerns then
+ description.extrakerns = extrakerns
+ end
+ end
+ end
+ end
+ end
+ local function do_it_copy(what)
+ if what then
+ for complex, simple in next, what do
+ complex = unicodes[complex]
+ simple = unicodes[simple]
+ if complex and simple then
+ local complexdescription = descriptions[complex]
+ if complexdescription then -- optional
+ local simpledescription = descriptions[simple]
+ if simpledescription then
+ local extrakerns
+ local kerns = simpledescription.kerns
+ if kerns then
+ for unicode, kern in next, kerns do
+ if extrakerns then
+ extrakerns[unicode] = kern
+ else
+ extrakerns = { [unicode] = kern }
+ end
+ end
+ end
+ local morekerns = simpledescription.extrakerns
+ if morekerns then
+ for unicode, kern in next, morekerns do
+ if extrakerns then
+ extrakerns[unicode] = kern
+ else
+ extrakerns = { [unicode] = kern }
+ end
+ end
+ end
+ if extrakerns then
+ complexdescription.extrakerns = extrakerns
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ -- add complex with values of simplified when present
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
+ -- copy kerns from simple char to complex char unless set
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
+end
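+
+-- Editor's sketch (invented data): the helpdata tables passed to the two
+-- helpers above are expected to map a composed ("complex") glyph name to the
+-- base ("simple") glyph it borrows kerns from, for instance
+--
+-- local leftkerned_example = { aacute = "a", agrave = "a" }
+--
+-- do_it_left then reuses kerns *against* the simple glyph for the complex
+-- one, while do_it_copy copies the kern table *of* the simple glyph to the
+-- complex one (unless already set).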
+
+--[[ldx--
+<p>The copying routine looks messy (and is indeed a bit messy).</p>
+--ldx]]--
+
+local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name
+ if data then
+ for unicode, description in next, data.descriptions do
+ local bb = description.boundingbox
+ if bb then
+ local ht, dp = bb[4], -bb[2]
+ if ht == 0 or ht < 0 then
+ -- no need to set it and no negative heights, nil == 0
+ else
+ description.height = ht
+ end
+ if dp == 0 or dp < 0 then
+ -- no need to set it and no negative depths, nil == 0
+ else
+ description.depth = dp
+ end
+ end
+ end
+ end
+end
+
+local function copytotfm(data)
+ if data and data.descriptions then
+ local metadata = data.metadata
+ local resources = data.resources
+ local properties = derivetable(data.properties)
+ local descriptions = derivetable(data.descriptions)
+ local goodies = derivetable(data.goodies)
+ local characters = { }
+ local parameters = { }
+ local unicodes = resources.unicodes
+ --
+ for unicode, description in next, data.descriptions do -- use parent table
+ characters[unicode] = { }
+ end
+ --
+ local filename = constructors.checkedfilename(resources)
+ local fontname = metadata.fontname or metadata.fullname
+ local fullname = metadata.fullname or metadata.fontname
+ local endash = 0x0020 -- space
+ local emdash = 0x2014
+ local spacer = "space"
+ local spaceunits = 500
+ --
+ local monospaced = metadata.monospaced
+ local charwidth = metadata.charwidth
+ local italicangle = metadata.italicangle
+ local charxheight = metadata.xheight and metadata.xheight > 0 and metadata.xheight
+ properties.monospaced = monospaced
+ parameters.italicangle = italicangle
+ parameters.charwidth = charwidth
+ parameters.charxheight = charxheight
+ -- same as otf
+ if properties.monospaced then
+ if descriptions[endash] then
+ spaceunits, spacer = descriptions[endash].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width, "emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ else
+ if descriptions[endash] then
+ spaceunits, spacer = descriptions[endash].width, "space"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ end
+ spaceunits = tonumber(spaceunits)
+ if spaceunits < 200 then
+ -- todo: warning
+ end
+ --
+ parameters.slant = 0
+ parameters.space = spaceunits
+ parameters.space_stretch = 500
+ parameters.space_shrink = 333
+ parameters.x_height = 400
+ parameters.quad = 1000
+ --
+ if italicangle and italicangle ~= 0 then
+ parameters.italicangle = italicangle
+ parameters.italicfactor = math.cos(math.rad(90+italicangle))
+ parameters.slant = - math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch = 0
+ parameters.space_shrink = 0
+ elseif afm.syncspace then
+ parameters.space_stretch = spaceunits/2
+ parameters.space_shrink = spaceunits/3
+ end
+ parameters.extra_space = parameters.space_shrink
+ if charxheight then
+ parameters.x_height = charxheight
+ else
+ -- same as otf
+ local x = 0x0078 -- x
+ if x then
+ local x = descriptions[x]
+ if x then
+ parameters.x_height = x.height
+ end
+ end
+ --
+ end
+ local fd = data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then -- math
+ for k,v in next, fd do
+ parameters[k] = v
+ end
+ end
+ --
+ parameters.designsize = (metadata.designsize or 10)*65536
+ parameters.ascender = abs(metadata.ascender or 0)
+ parameters.descender = abs(metadata.descender or 0)
+ parameters.units = 1000
+ --
+ properties.spacer = spacer
+ properties.encodingbytes = 2
+ properties.format = fonts.formats[filename] or "type1"
+ properties.filename = filename
+ properties.fontname = fontname
+ properties.fullname = fullname
+ properties.psname = fullname
+ properties.name = filename or fullname or fontname
+ --
+ if next(characters) then
+ return {
+ characters = characters,
+ descriptions = descriptions,
+ parameters = parameters,
+ resources = resources,
+ properties = properties,
+ goodies = goodies,
+ }
+ end
+ end
+ return nil
+end
+
+--[[ldx--
+<p>Originally we had features kind of hard coded for <l n='afm'/>
+files but since I expect to support more font formats, I decided
+to treat this font format like any other and handle features in a
+more configurable way.</p>
+--ldx]]--
+
+function afm.setfeatures(tfmdata,features)
+ local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
+ if okay then
+ return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
+ else
+ return { } -- will become false
+ end
+end
+
+local function addtables(data)
+ local resources = data.resources
+ local lookuptags = resources.lookuptags
+ local unicodes = resources.unicodes
+ if not lookuptags then
+ lookuptags = { }
+ resources.lookuptags = lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v = type(k) == "number" and ("lookup " .. k) or k
+ t[k] = v
+ return v
+ end)
+ if not unicodes then
+ unicodes = { }
+ resources.unicodes = unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u, d in next, data.descriptions do
+ local n = d.name
+ if n then
+ t[n] = u
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes) -- do we really need this?
+end
+
+local function afmtotfm(specification)
+ local afmname = specification.filename or specification.name
+ if specification.forced == "afm" or specification.format == "afm" then -- move this one up
+ if trace_loading then
+ report_afm("forcing afm format for %a",afmname)
+ end
+ else
+ local tfmname = findbinfile(afmname,"ofm") or ""
+ if tfmname ~= "" then
+ if trace_loading then
+ report_afm("fallback from afm to tfm for %a",afmname)
+ end
+ return -- just that
+ end
+ end
+ if afmname ~= "" then
+ -- weird, isn't this already done then?
+ local features = constructors.checkedfeatures("afm",specification.features.normal)
+ specification.features.normal = features
+ constructors.hashinstance(specification,true) -- also weird here
+ --
+ specification = definers.resolve(specification) -- new, was forgotten
+ local cache_id = specification.hash
+ local tfmdata = containers.read(constructors.cache, cache_id) -- cache with features applied
+ if not tfmdata then
+ local rawdata = afm.load(afmname)
+ if rawdata and next(rawdata) then
+ addtables(rawdata)
+ adddimensions(rawdata)
+ tfmdata = copytotfm(rawdata)
+ if tfmdata and next(tfmdata) then
+ local shared = tfmdata.shared
+ if not shared then
+ shared = { }
+ tfmdata.shared = shared
+ end
+ shared.rawdata = rawdata
+ shared.features = features
+ shared.processes = afm.setfeatures(tfmdata,features)
+ end
+ elseif trace_loading then
+ report_afm("no (valid) afm file found with name %a",afmname)
+ end
+ tfmdata = containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+ end
+end
+
+--[[ldx--
+<p>As soon as we could intercept the <l n='tfm'/> reader, I implemented an
+<l n='afm'/> reader. Since traditional <l n='pdftex'/> could use <l n='opentype'/>
+fonts with <l n='afm'/> companions, the following method could also handle
+those cases, but now that we can handle <l n='opentype'/> directly we no longer
+need this feature.</p>
+--ldx]]--
+
+local function read_from_afm(specification)
+ local tfmdata = afmtotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name = specification.name
+ tfmdata = constructors.scale(tfmdata, specification)
+ local allfeatures = tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+ fonts.loggers.register(tfmdata,'afm',specification)
+ end
+ return tfmdata
+end
+
+--[[ldx--
+<p>Here comes the implementation of a few features. We only implement
+those that make sense for this format.</p>
+--ldx]]--
+
+local function prepareligatures(tfmdata,ligatures,value)
+ if value then
+ local descriptions = tfmdata.descriptions
+ local hasligatures = false
+ for unicode, character in next, tfmdata.characters do
+ local description = descriptions[unicode]
+ local dligatures = description.ligatures
+ if dligatures then
+ local cligatures = character.ligatures
+ if not cligatures then
+ cligatures = { }
+ character.ligatures = cligatures
+ end
+ for unicode, ligature in next, dligatures do
+ cligatures[unicode] = {
+ char = ligature,
+ type = 0
+ }
+ end
+ hasligatures = true
+ end
+ end
+ tfmdata.properties.hasligatures = hasligatures
+ end
+end
+
+local function preparekerns(tfmdata,kerns,value)
+ if value then
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
+ local descriptions = tfmdata.descriptions
+ local haskerns = false
+ for u, chr in next, tfmdata.characters do
+ local d = descriptions[u]
+ local newkerns = d[kerns]
+ if newkerns then
+ local kerns = chr.kerns
+ if not kerns then
+ kerns = { }
+ chr.kerns = kerns
+ end
+ for k,v in next, newkerns do
+ local uk = unicodes[k]
+ if uk then
+ kerns[uk] = v
+ end
+ end
+ haskerns = true
+ end
+ end
+ tfmdata.properties.haskerns = haskerns
+ end
+end
+
+local list = {
+ -- [0x0022] = 0x201D,
+ [0x0027] = 0x2019,
+ -- [0x0060] = 0x2018,
+}
+
+local function texreplacements(tfmdata,value)
+ local descriptions = tfmdata.descriptions
+ local characters = tfmdata.characters
+ for k, v in next, list do
+ characters [k] = characters [v] -- we forget about kerns
+ descriptions[k] = descriptions[v] -- we forget about kerns
+ end
+end
+
+local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures', value) end
+local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
+local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns', value) end
+local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns', value) end
+
+registerafmfeature {
+ name = "liga",
+ description = "traditional ligatures",
+ initializers = {
+ base = ligatures,
+ node = ligatures,
+ }
+}
+
+registerafmfeature {
+ name = "kern",
+ description = "intercharacter kerning",
+ initializers = {
+ base = kerns,
+ node = kerns,
+ }
+}
+
+registerafmfeature {
+ name = "extrakerns",
+ description = "additional intercharacter kerning",
+ initializers = {
+ base = extrakerns,
+ node = extrakerns,
+ }
+}
+
+registerafmfeature {
+ name = 'tlig',
+ description = 'tex ligatures',
+ initializers = {
+ base = texligatures,
+ node = texligatures,
+ }
+}
+
+registerafmfeature {
+ name = 'trep',
+ description = 'tex replacements',
+ initializers = {
+ base = texreplacements,
+ node = texreplacements,
+ }
+}
+
+-- readers
+
+local check_tfm = readers.check_tfm
+
+fonts.formats.afm = "type1"
+fonts.formats.pfb = "type1"
+
+local function check_afm(specification,fullname)
+ local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"afm") or ""
+ end
+ if foundname == "" and afm.autoprefixed then
+ local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
+ if encoding and shortname and fonts.encodings.known[encoding] then
+ shortname = findbinfile(shortname,'afm') or "" -- just to be sure
+ if shortname ~= "" then
+ foundname = shortname
+ if trace_defining then
+ report_afm("stripping encoding prefix from filename %a",afmname)
+ end
+ end
+ end
+ end
+ if foundname ~= "" then
+ specification.filename = foundname
+ specification.format = "afm"
+ return read_from_afm(specification)
+ end
+end
+
+function readers.afm(specification,method)
+ local fullname, tfmdata = specification.filename or "", nil
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ tfmdata = check_afm(specification,specification.name .. "." .. forced)
+ end
+ if not tfmdata then
+ method = method or definers.method or "afm or tfm"
+ if method == "tfm" then
+ tfmdata = check_tfm(specification,specification.name)
+ elseif method == "afm" then
+ tfmdata = check_afm(specification,specification.name)
+ elseif method == "tfm or afm" then
+ tfmdata = check_tfm(specification,specification.name) or check_afm(specification,specification.name)
+ else -- method == "afm or tfm" or method == ""
+ tfmdata = check_afm(specification,specification.name) or check_tfm(specification,specification.name)
+ end
+ end
+ else
+ tfmdata = check_afm(specification,fullname)
+ end
+ return tfmdata
+end
+
+function readers.pfb(specification,method) -- only called when forced
+ local original = specification.specification
+ if trace_defining then
+ report_afm("using afm reader for %a",original)
+ end
+ specification.specification = gsub(original,"%.pfb",".afm")
+ specification.forced = "afm"
+ return readers.afm(specification,method)
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-cid.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-cid.lua
new file mode 100644
index 00000000000..0eaacdfbd54
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-cid.lua
@@ -0,0 +1,177 @@
+if not modules then modules = { } end modules ['font-cid'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (cidmaps)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, match, lower = string.format, string.match, string.lower
+local tonumber = tonumber
+local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match
+
+local fonts, logs, trackers = fonts, logs, trackers
+
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+
+local cid = { }
+fonts.cid = cid
+
+local cidmap = { }
+local cidmax = 10
+
+-- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
+--
+-- 18964 18964 (leader)
+-- 0 /.notdef
+-- 1..95 0020
+-- 99 3000
+
+local number = C(R("09","af","AF")^1)
+local space = S(" \n\r\t")
+local spaces = space^0
+local period = P(".")
+local periods = period * period
+local name = P("/") * C((1-space)^1)
+
+local unicodes, names = { }, { } -- we could use Carg now
+
+local function do_one(a,b)
+ unicodes[tonumber(a)] = tonumber(b,16)
+end
+
+local function do_range(a,b,c)
+ c = tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i] = c
+ c = c + 1
+ end
+end
+
+local function do_name(a,b)
+ names[tonumber(a)] = b
+end
+
+local grammar = P { "start",
+ start = number * spaces * number * V("series"),
+ series = (spaces * (V("one") + V("range") + V("named")))^1,
+ one = (number * spaces * number) / do_one,
+ range = (number * periods * number * spaces * number) / do_range,
+ named = (number * spaces * name) / do_name
+}
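+
+-- Editor's sketch (not executed here): feeding the sample lines quoted above
+-- to the grammar fills the module level tables, e.g.
+--
+-- lpegmatch(grammar, "18964 18964\n0 /.notdef\n1..95 0020\n99 3000")
+-- -- names   [0]  == ".notdef"
+-- -- unicodes[1]  == 0x0020, unicodes[2] == 0x0021, ... unicodes[95] == 0x007E
+-- -- unicodes[99] == 0x3000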
+
+local function loadcidfile(filename)
+ local data = io.loaddata(filename)
+ if data then
+ unicodes, names = { }, { }
+ lpegmatch(grammar,data)
+ local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement = supplement,
+ registry = registry,
+ ordering = ordering,
+ filename = filename,
+ unicodes = unicodes,
+ names = names,
+ }
+ end
+end
+
+cid.loadfile = loadcidfile -- we use the frozen variant
+local template = "%s-%s-%s.cidmap"
+
+local function locate(registry,ordering,supplement)
+ local filename = format(template,registry,ordering,supplement)
+ local hashname = lower(filename)
+ local found = cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname = resolvers.findfile(filename,'cid') or ""
+ if fullname ~= "" then
+ found = loadcidfile(fullname)
+ if found then
+ if trace_loading then
+ report_otf("using cidmap file %a",filename)
+ end
+ cidmap[hashname] = found
+ found.usedname = file.basename(filename)
+ end
+ end
+ end
+ return found
+end
+
+-- cf Arthur R. we can safely scan upwards since cids are downward compatible
+
+function cid.getmap(specification)
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry = specification.registry
+ local ordering = specification.ordering
+ local supplement = specification.supplement
+ local filename = format(template,registry,ordering,supplement)
+ local lowername = lower(filename)
+ local found = cidmap[lowername]
+ if found then
+ return found
+ end
+ if ordering == "Identity" then
+ local found = {
+ supplement = supplement,
+ registry = registry,
+ ordering = ordering,
+ filename = filename,
+ unicodes = { },
+ names = { },
+ }
+ cidmap[lowername] = found
+ return found
+ end
+ -- check for already loaded file
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found = locate(registry,ordering,supplement)
+ if not found then
+ local supnum = tonumber(supplement)
+ local cidnum = nil
+ -- next highest (alternatively we could start high)
+ if supnum < cidmax then
+ for s=supnum+1,cidmax do
+ local c = locate(registry,ordering,s)
+ if c then
+ found, cidnum = c, s
+ break
+ end
+ end
+ end
+ -- next lowest (least worse fit)
+ if not found and supnum > 0 then
+ for s=supnum-1,0,-1 do
+ local c = locate(registry,ordering,s)
+ if c then
+ found, cidnum = c, s
+ break
+ end
+ end
+ end
+ -- prevent further lookups -- somewhat tricky
+ registry = lower(registry)
+ ordering = lower(ordering)
+ if found and cidnum > 0 then
+ for s=0,cidnum-1 do
+ local filename = format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename] = found
+ end
+ end
+ end
+ end
+ return found
+end
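+
+-- Editor's sketch (hypothetical call, not executed here): a typical cidinfo
+-- table as found in cid keyed fonts; the map is only found when a matching
+-- *.cidmap file can be located by the resolvers.
+--
+-- local map = cid.getmap { registry = "Adobe", ordering = "Japan1", supplement = 6 }
+-- -- map.unicodes : cid index -> unicode
+-- -- map.names    : cid index -> glyph name (e.g. ".notdef")
+-- -- when the exact supplement is missing, a higher (or else lower) one is used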
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-con.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-con.lua
new file mode 100644
index 00000000000..55d7793cf91
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-con.lua
@@ -0,0 +1,1448 @@
+if not modules then modules = { } end modules ['font-con'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- some names of table entries will be changed (no _)
+
+local next, tostring, rawget = next, tostring, rawget
+local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub
+local utfbyte = utf.byte
+local sort, insert, concat, sortedkeys, serialize, fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy
+local derivetable = table.derive
+
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
+
+local report_defining = logs.reporter("fonts","defining")
+
+-- watch out: no negative depths and no negative heights permitted in regular fonts
+
+--[[ldx--
+<p>Here we only implement a few helper functions.</p>
+--ldx]]--
+
+local fonts = fonts
+local constructors = fonts.constructors or { }
+fonts.constructors = constructors
+local handlers = fonts.handlers or { } -- can have preloaded tables
+fonts.handlers = handlers
+
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+
+-- will be directives
+
+constructors.dontembed = allocate()
+constructors.autocleanup = true
+constructors.namemode = "fullpath" -- will be a function
+
+constructors.version = 1.01
+constructors.cache = containers.define("fonts", "constructors", constructors.version, false)
+
+constructors.privateoffset = 0xF0000 -- 0x10FFFF
+
+constructors.cacheintex = true -- so we see the original table in fonts.font
+
+-- Some experimental helpers (handy for tracing):
+--
+-- todo: extra:
+--
+-- extra_space => space.extra
+-- space => space.width
+-- space_stretch => space.stretch
+-- space_shrink => space.shrink
+
+-- We do keep the x-height, extra_space, space_shrink and space_stretch
+-- around as these are low level official names.
+
+constructors.keys = {
+ properties = {
+ encodingbytes = "number",
+ embedding = "number",
+ cidinfo = {
+ },
+ format = "string",
+ fontname = "string",
+ fullname = "string",
+ filename = "filename",
+ psname = "string",
+ name = "string",
+ virtualized = "boolean",
+ hasitalics = "boolean",
+ autoitalicamount = "basepoints",
+ nostackmath = "boolean",
+ noglyphnames = "boolean",
+ mode = "string",
+ hasmath = "boolean",
+ mathitalics = "boolean",
+ textitalics = "boolean",
+ finalized = "boolean",
+ },
+ parameters = {
+ mathsize = "number",
+ scriptpercentage = "float",
+ scriptscriptpercentage = "float",
+ units = "cardinal",
+ designsize = "scaledpoints",
+ expansion = {
+ stretch = "integerscale", -- might become float
+ shrink = "integerscale", -- might become float
+ step = "integerscale", -- might become float
+ auto = "boolean",
+ },
+ protrusion = {
+ auto = "boolean",
+ },
+ slantfactor = "float",
+ extendfactor = "float",
+ factor = "float",
+ hfactor = "float",
+ vfactor = "float",
+ size = "scaledpoints",
+ units = "scaledpoints",
+ scaledpoints = "scaledpoints",
+ slantperpoint = "scaledpoints",
+ spacing = {
+ width = "scaledpoints",
+ stretch = "scaledpoints",
+ shrink = "scaledpoints",
+ extra = "scaledpoints",
+ },
+ xheight = "scaledpoints",
+ quad = "scaledpoints",
+ ascender = "scaledpoints",
+ descender = "scaledpoints",
+ synonyms = {
+ space = "spacing.width",
+ spacestretch = "spacing.stretch",
+ spaceshrink = "spacing.shrink",
+ extraspace = "spacing.extra",
+ x_height = "xheight",
+ space_stretch = "spacing.stretch",
+ space_shrink = "spacing.shrink",
+ extra_space = "spacing.extra",
+ em = "quad",
+ ex = "xheight",
+ slant = "slantperpoint",
+ },
+ },
+ description = {
+ width = "basepoints",
+ height = "basepoints",
+ depth = "basepoints",
+ boundingbox = { },
+ },
+ character = {
+ width = "scaledpoints",
+ height = "scaledpoints",
+ depth = "scaledpoints",
+ italic = "scaledpoints",
+ },
+}
+
+-- This might become an interface:
+
+local designsizes = allocate()
+constructors.designsizes = designsizes
+local loadedfonts = allocate()
+constructors.loadedfonts = loadedfonts
+
+--[[ldx--
+<p>We need to normalize the scale factor (in scaled points). This has to
+do with the fact that <l n='tex'/> uses a negative multiple of 1000 as
+a signal for a font scaled based on the design size.</p>
+--ldx]]--
+
+local factors = {
+ pt = 65536.0,
+ bp = 65781.8,
+}
+
+function constructors.setfactor(f)
+ constructors.factor = factors[f or 'pt'] or factors.pt
+end
+
+constructors.setfactor()
+
+function constructors.scaled(scaledpoints, designsize) -- handles designsize in sp as well
+ if scaledpoints < 0 then
+ local factor = constructors.factor
+ if designsize then
+ if designsize > factor then -- or just 1000 / when? mp?
+ return (- scaledpoints/1000) * designsize -- sp's
+ else
+ return (- scaledpoints/1000) * designsize * factor
+ end
+ else
+ return (- scaledpoints/1000) * 10 * factor
+ end
+ else
+ return scaledpoints
+ end
+end
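+
+-- Editor's sketch (illustrative values): a negative size is interpreted as a
+-- per mille factor of the design size, a positive one is already in scaled
+-- points and passes through unchanged.
+--
+-- constructors.scaled( 786432)            -- -> 786432 sp (12pt), untouched
+-- constructors.scaled(-1200, 10 * 65536)  -- -> about 12pt: 1.2 times the design size
+-- constructors.scaled(-1000)              -- -> 10pt: no design size given, 10pt assumed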
+
+--[[ldx--
+<p>Beware, the boundingbox is passed as reference so we may not overwrite it
+in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to
+excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p>
+--ldx]]--
+
+-- The scaler is only used for otf and afm and virtual fonts. If a virtual font has italic
+-- correction make sure to set the hasitalics flag. Some more flags will be added in the
+-- future.
+
+--[[ldx--
+<p>The reason why the scaler was originally split is that for a while we experimented
+with a helper function. However, in practice the <l n='api'/> calls are too slow to
+make this profitable and the <l n='lua'/> based variant was just faster. A day
+wasted but an experience richer.</p>
+--ldx]]--
+
+-- we can get rid of the tfm instance when we have fast access to the
+-- scaled character dimensions at the tex end, e.g. a fontobject.width
+-- actually we already have some of that now as virtual keys in glyphs
+--
+-- flushing the kern and ligature tables from memory saves a lot (only
+-- base mode) but it complicates vf building where the new characters
+-- demand this data .. solution: functions that access them
+
+function constructors.cleanuptable(tfmdata)
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k, v in next, tfmdata.characters do
+ if v.commands then v.commands = nil end
+ -- if v.kerns then v.kerns = nil end
+ end
+ end
+end
+
+-- experimental, sharing kerns (unscaled and scaled) saves memory
+-- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata)
+-- loop over descriptions (afm and otf have descriptions, tfm not)
+-- there is no need (yet) to assign a value to chr.tonunicode
+
+-- constructors.prepare_base_kerns(tfmdata) -- optimalization
+
+-- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename
+-- when collapsing fonts, luatex looks at both target.name and target.fullname, as ttc files
+-- can have multiple subfonts
+
+function constructors.calculatescale(tfmdata,scaledpoints)
+ local parameters = tfmdata.parameters
+ if scaledpoints < 0 then
+ scaledpoints = (- scaledpoints/1000) * (tfmdata.designsize or parameters.designsize) -- already in sp
+ end
+ return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta
+end
+
+local unscaled = {
+ ScriptPercentScaleDown = true,
+ ScriptScriptPercentScaleDown = true,
+ RadicalDegreeBottomRaisePercent = true
+}
+
+function constructors.assignmathparameters(target,original) -- simple variant, not used in context
+ -- when a tfm file is loaded, it has already been scaled
+ -- and it never enters the scaled so this is otf only and
+ -- even then we do some extra in the context math plugins
+ local mathparameters = original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters = target.parameters
+ local targetproperties = target.properties
+ local targetmathparameters = { }
+ local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name, value in next, mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name] = value
+ else
+ targetmathparameters[name] = value * factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize = 1.01 * targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize = 2.40 * targetparameters.size
+ end
+ target.mathparameters = targetmathparameters
+ end
+end
+
+function constructors.beforecopyingcharacters(target,original)
+ -- can be used for additional tweaking
+end
+
+function constructors.aftercopyingcharacters(target,original)
+ -- can be used for additional tweaking
+end
+
+-- It's probably ok to hash just the indices because there is not that much
+-- chance that one will shift slots and leave the others unset then. Anyway,
+-- there is of course some overhead here, but it might as well get compensated
+-- by less time spent on including the font resource twice. For the moment
+-- we default to false, so a macro package has to enable it explicitly. In
+-- LuaTeX the fullname is used to identify a font as being unique.
+
+constructors.sharefonts = false
+constructors.nofsharedfonts = 0
+local sharednames = { }
+
+function constructors.trytosharefont(target,tfmdata)
+ if constructors.sharefonts then -- not robust !
+ local characters = target.characters
+ local n = 1
+ local t = { target.psname }
+ local u = sortedkeys(characters)
+ for i=1,#u do
+ local k = u[i]
+ n = n + 1 ; t[n] = k
+ n = n + 1 ; t[n] = characters[k].index or k
+ end
+ local h = md5.HEX(concat(t," "))
+ local s = sharednames[h]
+ if s then
+ if trace_defining then
+ report_defining("font %a uses backend resources of font %a",target.fullname,s)
+ end
+ target.fullname = s
+ constructors.nofsharedfonts = constructors.nofsharedfonts + 1
+ target.properties.sharedwith = s
+ else
+ sharednames[h] = target.fullname
+ end
+ end
+end
+
+function constructors.enhanceparameters(parameters)
+ local xheight = parameters.x_height
+ local quad = parameters.quad
+ local space = parameters.space
+ local stretch = parameters.space_stretch
+ local shrink = parameters.space_shrink
+ local extra = parameters.extra_space
+ local slant = parameters.slant
+ parameters.xheight = xheight
+ parameters.spacestretch = stretch
+ parameters.spaceshrink = shrink
+ parameters.extraspace = extra
+ parameters.em = quad
+ parameters.ex = xheight
+ parameters.slantperpoint = slant
+ parameters.spacing = {
+ width = space,
+ stretch = stretch,
+ shrink = shrink,
+ extra = extra,
+ }
+end
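+
+-- Editor's sketch (invented numbers): enhanceparameters only adds the
+-- friendlier aliases next to the official low level names.
+--
+-- local p = { x_height = 430, quad = 1000, space = 350, space_stretch = 175,
+--             space_shrink = 120, extra_space = 120, slant = 0 }
+-- constructors.enhanceparameters(p)
+-- -- p.xheight == 430, p.em == 1000, p.ex == 430, p.slantperpoint == 0
+-- -- p.spacing == { width = 350, stretch = 175, shrink = 120, extra = 120 }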
+
+function constructors.scale(tfmdata,specification)
+ local target = { } -- the new table
+ --
+ if tonumber(specification) then
+ specification = { size = specification }
+ end
+ target.specification = specification
+ --
+ local scaledpoints = specification.size
+ local relativeid = specification.relativeid
+ --
+ local properties = tfmdata.properties or { }
+ local goodies = tfmdata.goodies or { }
+ local resources = tfmdata.resources or { }
+ local descriptions = tfmdata.descriptions or { } -- bad news if empty
+ local characters = tfmdata.characters or { } -- bad news if empty
+ local changed = tfmdata.changed or { } -- for base mode
+ local shared = tfmdata.shared or { }
+ local parameters = tfmdata.parameters or { }
+ local mathparameters = tfmdata.mathparameters or { }
+ --
+ local targetcharacters = { }
+ local targetdescriptions = derivetable(descriptions)
+ local targetparameters = derivetable(parameters)
+ local targetproperties = derivetable(properties)
+ local targetgoodies = goodies -- we need to loop so no metatable
+ target.characters = targetcharacters
+ target.descriptions = targetdescriptions
+ target.parameters = targetparameters
+ -- target.mathparameters = targetmathparameters -- happens elsewhere
+ target.properties = targetproperties
+ target.goodies = targetgoodies
+ target.shared = shared
+ target.resources = resources
+ target.unscaled = tfmdata -- the original unscaled one
+ --
+ -- specification.mathsize : 1=text 2=script 3=scriptscript
+ -- specification.textsize : natural (text)size
+ -- parameters.mathsize : 1=text 2=script 3=scriptscript >1000 enforced size (feature value other than yes)
+ --
+ local mathsize = tonumber(specification.mathsize) or 0
+ local textsize = tonumber(specification.textsize) or scaledpoints
+ local forcedsize = tonumber(parameters.mathsize ) or 0
+ local extrafactor = tonumber(specification.factor ) or 1
+ if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then
+ scaledpoints = parameters.scriptpercentage * textsize / 100
+ elseif (mathsize == 3 or forcedsize == 3) and parameters.scriptscriptpercentage then
+ scaledpoints = parameters.scriptscriptpercentage * textsize / 100
+ elseif forcedsize > 1000 then -- safeguard
+ scaledpoints = forcedsize
+ end
+ targetparameters.mathsize = mathsize -- context specific
+ targetparameters.textsize = textsize -- context specific
+ targetparameters.forcedsize = forcedsize -- context specific
+ targetparameters.extrafactor = extrafactor -- context specific
+ --
+ local tounicode = fonts.mappings.tounicode
+ --
+ local defaultwidth = resources.defaultwidth or 0
+ local defaultheight = resources.defaultheight or 0
+ local defaultdepth = resources.defaultdepth or 0
+ local units = parameters.units or 1000
+ --
+ if target.fonts then
+ target.fonts = fastcopy(target.fonts) -- maybe we virtualize more afterwards
+ end
+ --
+ -- boundary keys are no longer needed as we now have a string 'right_boundary'
+ -- that can be used in relevant tables (kerns and ligatures) ... not that I ever
+ -- used them
+ --
+ -- boundarychar_label = 0, -- not needed
+ -- boundarychar = 65536, -- there is now a string 'right_boundary'
+ -- false_boundarychar = 65536, -- produces invalid tfm in luatex
+ --
+ targetproperties.language = properties.language or "dflt" -- inherited
+ targetproperties.script = properties.script or "dflt" -- inherited
+ targetproperties.mode = properties.mode or "base" -- inherited
+ --
+ local askedscaledpoints = scaledpoints
+ local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints,nil,specification) -- no shortcut, can be redefined
+ --
+ local hdelta = delta
+ local vdelta = delta
+ --
+ target.designsize = parameters.designsize -- not really needed so it might become obsolete
+ target.units = units
+ target.units_per_em = units -- just a trigger for the backend
+ --
+ local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all
+ target.direction = direction
+ properties.direction = direction
+ --
+ target.size = scaledpoints
+ --
+ target.encodingbytes = properties.encodingbytes or 1
+ target.embedding = properties.embedding or "subset"
+ target.tounicode = 1
+ target.cidinfo = properties.cidinfo
+ target.format = properties.format
+ target.cache = constructors.cacheintex and "yes" or "renew"
+ --
+ local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on
+ local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although
+ local filename = properties.filename or tfmdata.filename -- that is not the right place to
+ local psname = properties.psname or tfmdata.psname -- pass them
+ local name = properties.name or tfmdata.name
+ --
+ if not psname or psname == "" then
+ -- name used in pdf file as well as for selecting subfont in ttc/dfont
+ psname = fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname = fontname
+ target.fullname = fullname
+ target.filename = filename
+ target.psname = psname
+ target.name = name
+ --
+ --
+ properties.fontname = fontname
+ properties.fullname = fullname
+ properties.filename = filename
+ properties.psname = psname
+ properties.name = name
+ -- expansion (hz)
+ local expansion = parameters.expansion
+ if expansion then
+ target.stretch = expansion.stretch
+ target.shrink = expansion.shrink
+ target.step = expansion.step
+ target.auto_expand = expansion.auto
+ end
+ -- protrusion
+ local protrusion = parameters.protrusion
+ if protrusion then
+ target.auto_protrude = protrusion.auto
+ end
+ -- widening
+ local extendfactor = parameters.extendfactor or 0
+ if extendfactor ~= 0 and extendfactor ~= 1 then
+ hdelta = hdelta * extendfactor
+ target.extend = extendfactor * 1000 -- extent ?
+ else
+ target.extend = 1000 -- extent ?
+ end
+ -- slanting
+ local slantfactor = parameters.slantfactor or 0
+ if slantfactor ~= 0 then
+ target.slant = slantfactor * 1000
+ else
+ target.slant = 0
+ end
+ --
+ targetparameters.factor = delta
+ targetparameters.hfactor = hdelta
+ targetparameters.vfactor = vdelta
+ targetparameters.size = scaledpoints
+ targetparameters.units = units
+ targetparameters.scaledpoints = askedscaledpoints
+ --
+ local isvirtual = properties.virtualized or tfmdata.type == "virtual"
+ local hasquality = target.auto_expand or target.auto_protrude
+ local hasitalics = properties.hasitalics
+ local autoitalicamount = properties.autoitalicamount
+ local stackmath = not properties.nostackmath
+ local nonames = properties.noglyphnames
+ local haskerns = properties.haskerns or properties.mode == "base" -- we can have afm in node mode
+ local hasligatures = properties.hasligatures or properties.mode == "base" -- we can have afm in node mode
+ local realdimensions = properties.realdimensions
+ --
+ if changed and not next(changed) then
+ changed = false
+ end
+ --
+ target.type = isvirtual and "virtual" or "real"
+ --
+ target.postprocessors = tfmdata.postprocessors
+ --
+ local targetslant = (parameters.slant or parameters[1] or 0) * factors.pt -- per point
+ local targetspace = (parameters.space or parameters[2] or 0) * hdelta
+ local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0) * hdelta
+ local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0) * hdelta
+ local targetx_height = (parameters.x_height or parameters[5] or 0) * vdelta
+ local targetquad = (parameters.quad or parameters[6] or 0) * hdelta
+ local targetextra_space = (parameters.extra_space or parameters[7] or 0) * hdelta
+ --
+ targetparameters.slant = targetslant -- slantperpoint
+ targetparameters.space = targetspace
+ targetparameters.space_stretch = targetspace_stretch
+ targetparameters.space_shrink = targetspace_shrink
+ targetparameters.x_height = targetx_height
+ targetparameters.quad = targetquad
+ targetparameters.extra_space = targetextra_space
+ --
+ local ascender = parameters.ascender
+ if ascender then
+ targetparameters.ascender = delta * ascender
+ end
+ local descender = parameters.descender
+ if descender then
+ targetparameters.descender = delta * descender
+ end
+ --
+ constructors.enhanceparameters(targetparameters) -- official copies for us
+ --
+ local protrusionfactor = (targetquad ~= 0 and 1000/targetquad) or 0
+ local scaledwidth = defaultwidth * hdelta
+ local scaledheight = defaultheight * vdelta
+ local scaleddepth = defaultdepth * vdelta
+ --
+ local hasmath = (properties.hasmath or next(mathparameters)) and true
+ --
+ if hasmath then
+ constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed
+ properties.hasmath = true
+ target.nomath = false
+ target.MathConstants = target.mathparameters
+ else
+ properties.hasmath = false
+ target.nomath = true
+ target.mathparameters = nil -- nop
+ end
+ --
+ -- Here we support some context specific trickery (this might move to a plugin). During the
+ -- transition to opentype the engine had troubles with italics so we had some additional code
+ -- for fixing that. In node mode (text) we don't care much if italics get passed because
+ -- the engine does nothing with them then.
+ --
+ if hasmath then
+ local mathitalics = properties.mathitalics
+ if mathitalics == false then
+ if trace_defining then
+ report_defining("%s italics %s for font %a, fullname %a, filename %a","math",hasitalics and "ignored" or "disabled",name,fullname,filename)
+ end
+ hasitalics = false
+ autoitalicamount = false
+ end
+ else
+ local textitalics = properties.textitalics
+ if textitalics == false then
+ if trace_defining then
+ report_defining("%s italics %s for font %a, fullname %a, filename %a","text",hasitalics and "ignored" or "disabled",name,fullname,filename)
+ end
+ hasitalics = false
+ autoitalicamount = false
+ end
+ end
+ --
+ -- end of context specific trickery
+ --
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,hasmath and "enabled" or "disabled",hasitalics and "enabled" or "disabled")
+ end
+ --
+ constructors.beforecopyingcharacters(target,tfmdata)
+ --
+ local sharedkerns = { }
+ --
+ -- we can have a dumb mode (basemode without math etc) that skips most
+ --
+ for unicode, character in next, characters do
+ local chr, description, index
+ if changed then
+ local c = changed[unicode]
+ if c then
+ description = descriptions[c] or descriptions[unicode] or character
+ character = characters[c] or character
+ index = description.index or c
+ else
+ description = descriptions[unicode] or character
+ index = description.index or unicode
+ end
+ else
+ description = descriptions[unicode] or character
+ index = description.index or unicode
+ end
+ local width = description.width
+ local height = description.height
+ local depth = description.depth
+ if realdimensions then
+ -- this is mostly for checking issues
+ if not height or height == 0 then
+ local bb = description.boundingbox
+ local ht = bb[4]
+ if ht ~= 0 then
+ height = ht
+ end
+ if not depth or depth == 0 then
+ local dp = -bb[2]
+ if dp ~= 0 then
+ depth = dp
+ end
+ end
+ elseif not depth or depth == 0 then
+ local dp = -description.boundingbox[2]
+ if dp ~= 0 then
+ depth = dp
+ end
+ end
+ end
+ if width then width = hdelta*width else width = scaledwidth end
+ if height then height = vdelta*height else height = scaledheight end
+ -- if depth then depth = vdelta*depth else depth = scaleddepth end
+ if depth and depth ~= 0 then
+ depth = delta*depth
+ if nonames then
+ chr = {
+ index = index,
+ height = height,
+ depth = depth,
+ width = width,
+ }
+ else
+ chr = {
+ name = description.name,
+ index = index,
+ height = height,
+ depth = depth,
+ width = width,
+ }
+ end
+ else
+ -- this saves a little bit of time and memory, esp for big cjk fonts
+ if nonames then
+ chr = {
+ index = index,
+ height = height,
+ width = width,
+ }
+ else
+ chr = {
+ name = description.name,
+ index = index,
+ height = height,
+ width = width,
+ }
+ end
+ end
+ local isunicode = description.unicode
+ if isunicode then
+ chr.unicode = isunicode
+ chr.tounicode = tounicode(isunicode)
+ end
+ if hasquality then
+ -- we could move these calculations elsewhere (saves calculations)
+ local ve = character.expansion_factor
+ if ve then
+ chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere
+ end
+ local vl = character.left_protruding
+ if vl then
+ chr.left_protruding = protrusionfactor*width*vl
+ end
+ local vr = character.right_protruding
+ if vr then
+ chr.right_protruding = protrusionfactor*width*vr
+ end
+ end
+ --
+ if hasmath then
+ --
+ -- todo, just operate on descriptions.math
+ local vn = character.next
+ if vn then
+ chr.next = vn
+ else
+ local vv = character.vert_variants
+ if vv then
+ local t = { }
+ for i=1,#vv do
+ local vvi = vv[i]
+ t[i] = {
+ ["start"] = (vvi["start"] or 0)*vdelta,
+ ["end"] = (vvi["end"] or 0)*vdelta,
+ ["advance"] = (vvi["advance"] or 0)*vdelta,
+ ["extender"] = vvi["extender"],
+ ["glyph"] = vvi["glyph"],
+ }
+ end
+ chr.vert_variants = t
+ else
+ local hv = character.horiz_variants
+ if hv then
+ local t = { }
+ for i=1,#hv do
+ local hvi = hv[i]
+ t[i] = {
+ ["start"] = (hvi["start"] or 0)*hdelta,
+ ["end"] = (hvi["end"] or 0)*hdelta,
+ ["advance"] = (hvi["advance"] or 0)*hdelta,
+ ["extender"] = hvi["extender"],
+ ["glyph"] = hvi["glyph"],
+ }
+ end
+ chr.horiz_variants = t
+ end
+ end
+ -- todo also check mathitalics (or that one can go away)
+ end
+ local vi = character.vert_italic
+ if vi and vi ~= 0 then
+ chr.vert_italic = vi*hdelta
+ end
+ local va = character.accent
+ if va then
+ chr.top_accent = vdelta*va
+ end
+ if stackmath then
+ local mk = character.mathkerns -- not in math ?
+ if mk then
+ local kerns = { }
+ local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i]
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
+ end kerns.top_right = k end
+ local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i]
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
+ end kerns.top_left = k end
+ local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i]
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
+ end kerns.bottom_left = k end
+ local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i]
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
+ end kerns.bottom_right = k end
+ chr.mathkern = kerns -- singular -> should be patched in luatex !
+ end
+ end
+ if hasitalics then
+ local vi = character.italic
+ if vi and vi ~= 0 then
+ chr.italic = vi*hdelta
+ end
+ end
+ elseif autoitalicamount then -- itlc feature
+ local vi = description.italic
+ if not vi then
+ local vi = description.boundingbox[3] - description.width + autoitalicamount
+ if vi > 0 then -- < 0 indicates no overshoot or a very small auto italic
+ chr.italic = vi*hdelta
+ end
+ elseif vi ~= 0 then
+ chr.italic = vi*hdelta
+ end
+ elseif hasitalics then -- unlikely
+ local vi = character.italic
+ if vi and vi ~= 0 then
+ chr.italic = vi*hdelta
+ end
+ end
+ if haskerns then
+ local vk = character.kerns
+ if vk then
+ local s = sharedkerns[vk]
+ if not s then
+ s = { }
+ for k,v in next, vk do s[k] = v*hdelta end
+ sharedkerns[vk] = s
+ end
+ chr.kerns = s
+ end
+ end
+ if hasligatures then
+ local vl = character.ligatures
+ if vl then
+ if true then
+ chr.ligatures = vl -- shared
+ else
+ local tt = { }
+ for i, l in next, vl do
+ tt[i] = l
+ end
+ chr.ligatures = tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc = character.commands
+ if vc then
+ -- we assume non scaled commands here
+ -- tricky .. we need to scale pseudo math glyphs too
+ -- which is why we deal with rules too
+ local ok = false
+ for i=1,#vc do
+ local key = vc[i][1]
+ if key == "right" or key == "down" then
+ ok = true
+ break
+ end
+ end
+ if ok then
+ local tt = { }
+ for i=1,#vc do
+ local ivc = vc[i]
+ local key = ivc[1]
+ if key == "right" then
+ tt[i] = { key, ivc[2]*hdelta }
+ elseif key == "down" then
+ tt[i] = { key, ivc[2]*vdelta }
+ elseif key == "rule" then
+ tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
+ else -- not comment
+ tt[i] = ivc -- shared since in cache and untouched
+ end
+ end
+ chr.commands = tt
+ else
+ chr.commands = vc
+ end
+ chr.index = nil
+ end
+ end
+ targetcharacters[unicode] = chr
+ end
+ --
+ properties.setitalics = hasitalics -- for postprocessing
+ --
+ constructors.aftercopyingcharacters(target,tfmdata)
+ --
+ constructors.trytosharefont(target,tfmdata)
+ --
+ return target
+end
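+
+-- Editor's sketch (hypothetical call, not executed here): the readers in this
+-- archive call the scaler with a specification table (a plain number is also
+-- accepted and treated as the size in scaled points).
+--
+-- local target = constructors.scale(tfmdata, { size = 10 * 65536 })
+-- -- target.characters now holds per glyph scaled width/height/depth and
+-- -- target.parameters the scaled space, quad, x_height and friends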
+
+function constructors.finalize(tfmdata)
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ --
+ if not tfmdata.characters then
+ return nil
+ end
+ --
+ if not tfmdata.goodies then
+ tfmdata.goodies = { } -- context specific
+ end
+ --
+ local parameters = tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ --
+ if not parameters.expansion then
+ parameters.expansion = {
+ stretch = tfmdata.stretch or 0,
+ shrink = tfmdata.shrink or 0,
+ step = tfmdata.step or 0,
+ auto = tfmdata.auto_expand or false,
+ }
+ end
+ --
+ if not parameters.protrusion then
+ parameters.protrusion = {
+ auto = tfmdata.auto_protrude
+ }
+ end
+ --
+ if not parameters.size then
+ parameters.size = tfmdata.size
+ end
+ --
+ if not parameters.extendfactor then
+ parameters.extendfactor = tfmdata.extend or 0
+ end
+ --
+ if not parameters.slantfactor then
+ parameters.slantfactor = tfmdata.slant or 0
+ end
+ --
+ local designsize = parameters.designsize
+ if designsize then
+ parameters.minsize = tfmdata.minsize or designsize
+ parameters.maxsize = tfmdata.maxsize or designsize
+ else
+ designsize = factors.pt * 10
+ parameters.designsize = designsize
+ parameters.minsize = designsize
+ parameters.maxsize = designsize
+ end
+ parameters.minsize = tfmdata.minsize or parameters.designsize
+ parameters.maxsize = tfmdata.maxsize or parameters.designsize
+ --
+ if not parameters.units then
+ parameters.units = tfmdata.units or tfmdata.units_per_em or 1000
+ end
+ --
+ if not tfmdata.descriptions then
+ local descriptions = { } -- yes or no
+ setmetatableindex(descriptions, function(t,k) local v = { } t[k] = v return v end)
+ tfmdata.descriptions = descriptions
+ end
+ --
+ local properties = tfmdata.properties
+ if not properties then
+ properties = { }
+ tfmdata.properties = properties
+ end
+ --
+ if not properties.virtualized then
+ properties.virtualized = tfmdata.type == "virtual"
+ end
+ --
+ if not tfmdata.properties then
+ tfmdata.properties = {
+ fontname = tfmdata.fontname,
+ filename = tfmdata.filename,
+ fullname = tfmdata.fullname,
+ name = tfmdata.name,
+ psname = tfmdata.psname,
+ --
+ encodingbytes = tfmdata.encodingbytes or 1,
+ embedding = tfmdata.embedding or "subset",
+ tounicode = tfmdata.tounicode or 1,
+ cidinfo = tfmdata.cidinfo or nil,
+ format = tfmdata.format or "type1",
+ direction = tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources = { }
+ end
+ if not tfmdata.shared then
+ tfmdata.shared = { }
+ end
+ --
+ -- tfmdata.fonts
+ -- tfmdata.unscaled
+ --
+ if not properties.hasmath then
+ properties.hasmath = not tfmdata.nomath
+ end
+ --
+ tfmdata.MathConstants = nil
+ tfmdata.postprocessors = nil
+ --
+ tfmdata.fontname = nil
+ tfmdata.filename = nil
+ tfmdata.fullname = nil
+ tfmdata.name = nil -- most tricky part
+ tfmdata.psname = nil
+ --
+ tfmdata.encodingbytes = nil
+ tfmdata.embedding = nil
+ tfmdata.tounicode = nil
+ tfmdata.cidinfo = nil
+ tfmdata.format = nil
+ tfmdata.direction = nil
+ tfmdata.type = nil
+ tfmdata.nomath = nil
+ tfmdata.designsize = nil
+ --
+ tfmdata.size = nil
+ tfmdata.stretch = nil
+ tfmdata.shrink = nil
+ tfmdata.step = nil
+ tfmdata.auto_expand = nil
+ tfmdata.auto_protrude = nil
+ tfmdata.extend = nil
+ tfmdata.slant = nil
+ tfmdata.units = nil
+ tfmdata.units_per_em = nil
+ --
+ tfmdata.cache = nil
+ --
+ properties.finalized = true
+ --
+ return tfmdata
+end
+
+--[[ldx--
+<p>A unique hash value is generated by:</p>
+--ldx]]--
+
+local hashmethods = { }
+constructors.hashmethods = hashmethods
+
+function constructors.hashfeatures(specification) -- will be overloaded
+ local features = specification.features
+ if features then
+ local t, tn = { }, 0
+ for category, list in next, features do
+ if next(list) then
+ local hasher = hashmethods[category]
+ if hasher then
+ local hash = hasher(list)
+ if hash then
+ tn = tn + 1
+ t[tn] = category .. ":" .. hash
+ end
+ end
+ end
+ end
+ if tn > 0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+
+hashmethods.normal = function(list)
+ local s = { }
+ local n = 0
+ for k, v in next, list do
+ if not k then
+ -- no need to add to hash
+ elseif k == "number" or k == "features" then
+ -- no need to add to hash (maybe we need a skip list)
+ else
+ n = n + 1
+ s[n] = k
+ end
+ end
+ if n > 0 then
+ sort(s)
+ for i=1,n do
+ local k = s[i]
+ s[i] = k .. '=' .. tostring(list[k])
+ end
+ return concat(s,"+")
+ end
+end
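+
+-- Editor's sketch (invented feature set): the normal hasher sorts the keys,
+-- skips "number" and "features", and the categories are then combined by
+-- hashfeatures above.
+--
+-- constructors.hashfeatures { features = { normal = { liga = true, kern = true, size = 10 } } }
+-- -- -> "normal:kern=true+liga=true+size=10"
+-- constructors.hashfeatures { }
+-- -- -> "unknown"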
+
+--[[ldx--
+<p>In principle we can share tfm tables when we are in node mode for a font, but then
+we need to define a font switch as an id/attr switch, which is no fun; in that
+case users can best use dynamic features, so we will not use that speedup. Okay,
+when we get rid of base mode we can optimize even further by sharing, but then we
+lose our testcases for <l n='luatex'/>.</p>
+--ldx]]--
+
+function constructors.hashinstance(specification,force)
+ local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
+ if force or not hash then
+ hash = constructors.hashfeatures(specification)
+ specification.hash = hash
+ end
+ if size < 1000 and designsizes[hash] then
+ size = math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size = size
+ end
+ if fallbacks then
+ return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks
+ else
+ return hash .. ' @ ' .. tostring(size)
+ end
+end
+
+function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata
+ if constructors.namemode == "specification" then
+ -- not to be used in context !
+ local specname = specification.specification
+ if specname then
+ tfmdata.properties.name = specname
+ if trace_defining then
+ report_otf("overloaded fontname %a",specname)
+ end
+ end
+ end
+end
+
+function constructors.checkedfilename(data)
+ local foundfilename = data.foundfilename
+ if not foundfilename then
+ local askedfilename = data.filename or ""
+ if askedfilename ~= "" then
+ askedfilename = resolvers.resolve(askedfilename) -- no shortcut
+ foundfilename = resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename == "" then
+ report_defining("source file %a is not found",askedfilename)
+ foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename ~= "" then
+ report_defining("using source file %a due to cache mismatch",foundfilename)
+ end
+ end
+ end
+ data.foundfilename = foundfilename
+ end
+ return foundfilename
+end
+
+local formats = allocate()
+fonts.formats = formats
+
+setmetatableindex(formats, function(t,k)
+ local l = lower(k)
+ if rawget(t,k) then
+ t[k] = l
+ return l
+ end
+ return rawget(t,file.suffix(l))
+end)
+
+local locations = { }
+
+local function setindeed(mode,target,group,name,action,position)
+ local t = target[mode]
+ if not t then
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
+ os.exit()
+ elseif position then
+ -- todo: remove existing
+ insert(t, position, { name = name, action = action })
+ else
+ for i=1,#t do
+ local ti = t[i]
+ if ti.name == name then
+ ti.action = action
+ return
+ end
+ end
+ insert(t, { name = name, action = action })
+ end
+end
+
+local function set(group,name,target,source)
+ target = target[group]
+ if not target then
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local source = source[group]
+ if not source then
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local node = source.node
+ local base = source.base
+ local position = source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+
+local function register(where,specification)
+ local name = specification.name
+ if name and name ~= "" then
+ local default = specification.default
+ local description = specification.description
+ local initializers = specification.initializers
+ local processors = specification.processors
+ local manipulators = specification.manipulators
+ local modechecker = specification.modechecker
+ if default then
+ where.defaults[name] = default
+ end
+ if description and description ~= "" then
+ where.descriptions[name] = description
+ end
+ if initializers then
+ set('initializers',name,where,specification)
+ end
+ if processors then
+ set('processors', name,where,specification)
+ end
+ if manipulators then
+ set('manipulators',name,where,specification)
+ end
+ if modechecker then
+ where.modechecker = modechecker
+ end
+ end
+end
+
+constructors.registerfeature = register
+
+function constructors.getfeatureaction(what,where,mode,name)
+ what = handlers[what].features
+ if what then
+ where = what[where]
+ if where then
+ mode = where[mode]
+ if mode then
+ for i=1,#mode do
+ local m = mode[i]
+ if m.name == name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+
+function constructors.newhandler(what) -- could be a metatable newindex
+ local handler = handlers[what]
+ if not handler then
+ handler = { }
+ handlers[what] = handler
+ end
+ return handler
+end
+
+function constructors.newfeatures(what) -- could be a metatable newindex
+ local handler = handlers[what]
+ local features = handler.features
+ if not features then
+ local tables = handler.tables -- can be preloaded
+ local statistics = handler.statistics -- can be preloaded
+ features = allocate {
+ defaults = { },
+ descriptions = tables and tables.features or { },
+ used = statistics and statistics.usedfeatures or { },
+ initializers = { base = { }, node = { } },
+ processors = { base = { }, node = { } },
+ manipulators = { base = { }, node = { } },
+ }
+ features.register = function(specification) return register(features,specification) end
+ handler.features = features -- will also become hidden
+ end
+ return features
+end
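+
+-- A sketch of how a handler typically uses this; the feature name and functions
+-- are made up, but the shape of the specification follows register() above:
+--
+-- local otffeatures = constructors.newfeatures("otf")
+-- otffeatures.register {
+--     name         = "demo",
+--     description  = "a demo feature",
+--     default      = false,
+--     initializers = {
+--         base = function(tfmdata,value,features) end,
+--         node = function(tfmdata,value,features) end,
+--     },
+-- }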
+
+--[[ldx--
+<p>We need to check for default features. For this we provide
+a helper function.</p>
+--ldx]]--
+
+function constructors.checkedfeatures(what,features)
+ local defaults = handlers[what].features.defaults
+ if features and next(features) then
+ features = fastcopy(features) -- can be inherited (mt) but then no loops possible
+ for key, value in next, defaults do
+ if features[key] == nil then
+ features[key] = value
+ end
+ end
+ return features
+ else
+ return fastcopy(defaults) -- we can change features in place
+ end
+end
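+
+-- For instance (sketch, with made-up defaults): if the registered defaults are
+-- { mode = "node", kern = true }, then
+--
+-- constructors.checkedfeatures("otf", { liga = true })
+--
+-- returns a copy like { liga = true, mode = "node", kern = true }, while passing
+-- nil or an empty table returns a fresh copy of the defaults themselves.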
+
+-- before scaling
+
+function constructors.initializefeatures(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties = tfmdata.properties or { } -- brrr
+ local whathandler = handlers[what]
+ local whatfeatures = whathandler.features
+ local whatinitializers = whatfeatures.initializers
+ local whatmodechecker = whatfeatures.modechecker
+ -- properties.mode can be enforced (for instance in font-otd)
+ local mode = properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode = mode -- also status
+ features.mode = mode -- both properties.mode or features.mode can be changed
+ --
+ local done = { }
+ while true do
+ local redo = false
+ local initializers = whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step = initializers[i]
+ local feature = step.name
+-- we could intercept mode here .. that needs a rewrite of this whole loop, but it would be cleaner that way
+ local value = features[feature]
+ if not value then
+ -- disabled
+ elseif done[feature] then
+ -- already done
+ else
+ local action = step.action
+ if trace then
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
+ end
+ action(tfmdata,value,features) -- can set mode (e.g. goodies) so it can trigger a restart
+ if mode ~= properties.mode or mode ~= features.mode then
+ if whatmodechecker then
+ properties.mode = whatmodechecker(tfmdata,features,properties.mode) -- force checking
+ features.mode = properties.mode
+ end
+ if mode ~= properties.mode then
+ mode = properties.mode
+ redo = true
+ end
+ end
+ done[feature] = true
+ end
+ if redo then
+ break
+ end
+ end
+ if not redo then
+ break
+ end
+ else
+ break
+ end
+ end
+ properties.mode = mode -- to be sure
+ return true
+ else
+ return false
+ end
+end
+
+-- while typesetting
+
+function constructors.collectprocessors(what,tfmdata,features,trace,report)
+ local processes, nofprocesses = { }, 0
+ if features and next(features) then
+ local properties = tfmdata.properties
+ local whathandler = handlers[what]
+ local whatfeatures = whathandler.features
+ local whatprocessors = whatfeatures.processors
+ local mode = properties.mode
+ local processors = whatprocessors[mode]
+ if processors then
+ for i=1,#processors do
+ local step = processors[i]
+ local feature = step.name
+ if features[feature] then
+ local action = step.action
+ if trace then
+ report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ nofprocesses = nofprocesses + 1
+ processes[nofprocesses] = action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %a for font %a",mode,properties.fullname)
+ end
+ end
+ return processes
+end
+
+-- after scaling
+
+function constructors.applymanipulators(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties = tfmdata.properties
+ local whathandler = handlers[what]
+ local whatfeatures = whathandler.features
+ local whatmanipulators = whatfeatures.manipulators
+ local mode = properties.mode
+ local manipulators = whatmanipulators[mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step = manipulators[i]
+ local feature = step.name
+ local value = features[feature]
+ if value then
+ local action = step.action
+ if trace then
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname)
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
+end
+
+function constructors.addcoreunicodes(unicodes) -- maybe make this a metatable if used at all
+ if not unicodes then
+ unicodes = { }
+ end
+ unicodes.space = 0x0020
+ unicodes.hyphen = 0x002D
+ unicodes.zwj = 0x200D
+ unicodes.zwnj = 0x200C
+ return unicodes
+end
+
+-- -- keep for a while: old tounicode code
+--
+-- if changed then
+-- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria)
+-- local c = changed[unicode]
+-- if c then
+-- -- local ligatures = character.ligatures -- the original ligatures (as we cannot rely on remapping)
+-- description = descriptions[c] or descriptions[unicode] or character
+-- character = characters[c] or character
+-- index = description.index or c
+-- if tounicode then
+-- touni = tounicode[index] -- nb: index!
+-- if not touni then -- goodie
+-- local d = descriptions[unicode] or characters[unicode]
+-- local i = d.index or unicode
+-- touni = tounicode[i] -- nb: index!
+-- end
+-- end
+-- -- if ligatures and not character.ligatures then
+-- -- character.ligatures = ligatures -- the original targets (for now at least.. see libertine smallcaps)
+-- -- end
+-- else
+-- description = descriptions[unicode] or character
+-- index = description.index or unicode
+-- if tounicode then
+-- touni = tounicode[index] -- nb: index!
+-- end
+-- end
+-- else
+-- description = descriptions[unicode] or character
+-- index = description.index or unicode
+-- if tounicode then
+-- touni = tounicode[index] -- nb: index!
+-- end
+-- end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-def.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-def.lua
new file mode 100644
index 00000000000..add42ee3828
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-def.lua
@@ -0,0 +1,454 @@
+if not modules then modules = { } end modules ['font-def'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We can overload some of the definers.functions so we don't local them.
+
+local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub
+local tostring, next = tostring, next
+local lpegmatch = lpeg.match
+local suffixonly, removesuffix = file.suffix, file.removesuffix
+
+local allocate = utilities.storage.allocate
+
+local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
+local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
+
+trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
+trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
+
+local report_defining = logs.reporter("fonts","defining")
+
+--[[ldx--
+<p>Here we deal with defining fonts. We do so by intercepting the
+default loader that only handles <l n='tfm'/>.</p>
+--ldx]]--
+
+local fonts = fonts
+local fontdata = fonts.hashes.identifiers
+local readers = fonts.readers
+local definers = fonts.definers
+local specifiers = fonts.specifiers
+local constructors = fonts.constructors
+local fontgoodies = fonts.goodies
+
+readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc
+
+local variants = allocate()
+specifiers.variants = variants
+
+definers.methods = definers.methods or { }
+
+local internalized = allocate() -- internal tex numbers (private)
+local lastdefined = nil -- we don't want this one to end up in s-tra-02
+
+local loadedfonts = constructors.loadedfonts
+local designsizes = constructors.designsizes
+
+-- not in generic (some day I'll make two defs, one for context, one for generic)
+
+local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+
+--[[ldx--
+<p>We hardly gain anything when we cache the final (pre scaled)
+<l n='tfm'/> table. Although it can be handy for debugging, we no
+longer carry this code along. Also, we now have quite some references
+to other tables, so we would end up with lots of catches.</p>
+--ldx]]--
+
+--[[ldx--
+<p>We can prefix a font specification by <type>name:</type> or
+<type>file:</type>. The first case will result in a lookup in the
+synonym table.</p>
+
+<typing>
+[ name: | file: ] identifier [ separator [ specification ] ]
+</typing>
+
+<p>The following function splits the font specification into components
+and prepares a table that will move along as we proceed.</p>
+--ldx]]--
+
+-- beware, we discard additional specs
+--
+-- method:name method:name(sub) method:name(sub)*spec method:name*spec
+-- name name(sub) name(sub)*spec name*spec
+-- name@spec*oeps
+
+local splitter, splitspecifiers = nil, "" -- not so nice
+
+local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
+
+local left = P("(")
+local right = P(")")
+local colon = P(":")
+local space = P(" ")
+
+definers.defaultlookup = "file"
+
+local prefixpattern = P(false)
+
+local function addspecifier(symbol)
+ splitspecifiers = splitspecifiers .. symbol
+ local method = S(splitspecifiers)
+ local lookup = C(prefixpattern) * colon
+ local sub = left * C(P(1-left-right-method)^1) * right
+ local specification = C(method) * C(P(1)^1)
+ local name = C((1-sub-specification)^1)
+ splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
+end
+
+local function addlookup(str,default)
+ prefixpattern = prefixpattern + P(str)
+end
+
+definers.addlookup = addlookup
+
+addlookup("file")
+addlookup("name")
+addlookup("spec")
+
+local function getspecification(str)
+ return lpegmatch(splitter,str or "") -- weird catch
+end
+
+definers.getspecification = getspecification
+
+function definers.registersplit(symbol,action,verbosename)
+ addspecifier(symbol)
+ variants[symbol] = action
+ if verbosename then
+ variants[verbosename] = action
+ end
+end
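+
+-- A sketch of what the splitter yields, assuming some split symbol (say "*") has
+-- been registered via definers.registersplit; the name and features are made up:
+--
+-- local lookup, name, sub, method, detail =
+--     definers.getspecification("file:iwona-regular(2)*mode=node;+liga")
+-- -- lookup = "file", name = "iwona-regular", sub = "2",
+-- -- method = "*",    detail = "mode=node;+liga"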
+
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size = size or 655360
+ if not lookup or lookup == "" then
+ lookup = definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification, lookup, name, sub, method, detail)
+ end
+ local t = {
+ lookup = lookup, -- forced type
+ specification = specification, -- full specification
+ size = size, -- size in scaled points or -1000*n
+ name = name, -- font or filename
+ sub = sub, -- subfont (eg in ttc)
+ method = method, -- specification method
+ detail = detail, -- specification
+ resolved = "", -- resolved font name
+ forced = "", -- forced loader
+ features = { }, -- preprocessed features
+ }
+ return t
+end
+
+
+definers.makespecification = makespecification
+
+function definers.analyze(specification, size)
+ -- can be optimized with locals
+ local lookup, name, sub, method, detail = getspecification(specification or "")
+ return makespecification(specification, lookup, name, sub, method, detail, size)
+end
+
+--[[ldx--
+<p>We can resolve the filename using the next function:</p>
+--ldx]]--
+
+definers.resolvers = definers.resolvers or { }
+local resolvers = definers.resolvers
+
+-- todo: reporter
+
+function resolvers.file(specification)
+ local name = resolvefile(specification.name) -- catch for renames
+ local suffix = lower(suffixonly(name))
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.forcedname = name
+ specification.name = removesuffix(name)
+ else
+ specification.name = name -- can be resolved
+ end
+end
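+
+-- For example (sketch, provided "otf" is a registered format):
+--
+-- local spec = { name = "lmroman10-regular.otf" }
+-- resolvers.file(spec)
+-- -- spec.forced = "otf", spec.forcedname = "lmroman10-regular.otf",
+-- -- spec.name   = "lmroman10-regular"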
+
+function resolvers.name(specification)
+ local resolve = fonts.names.resolve
+ if resolve then
+ local resolved, sub, subindex = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
+ if resolved then
+ specification.resolved = resolved
+ specification.sub = sub
+ specification.subindex = subindex
+ local suffix = lower(suffixonly(resolved))
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.forcedname = resolved
+ specification.name = removesuffix(resolved)
+ else
+ specification.name = resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
+end
+
+function resolvers.spec(specification)
+ local resolvespec = fonts.names.resolvespec
+ if resolvespec then
+ local resolved, sub, subindex = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
+ if resolved then
+ specification.resolved = resolved
+ specification.sub = sub
+ specification.subindex = subindex
+ specification.forced = lower(suffixonly(resolved))
+ specification.forcedname = resolved
+ specification.name = removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
+end
+
+function definers.resolve(specification)
+ if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
+ local r = resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced == "" then
+ specification.forced = nil
+ specification.forcedname = nil
+ end
+ specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification))
+ if specification.sub and specification.sub ~= "" then
+ specification.hash = specification.sub .. ' @ ' .. specification.hash
+ end
+ return specification
+end
+
+--[[ldx--
+<p>The main read function either uses a forced reader (as determined by
+a lookup) or tries to resolve the name using the list of readers.</p>
+
+<p>We need to cache when possible. We do cache raw tfm data (from <l
+n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based
+on specification (name) and size, that is, <l n='tex'/> only needs a number
+for an already loaded font. It may also make sense to cache fonts
+before they're scaled (store <l n='tfm'/>'s with applied methods
+and features), but there may be a relation between the size and
+features (esp in virtual fonts), so let's not do that now.</p>
+
+<p>Watch out, here we do load a font, but we don't prepare the
+specification yet.</p>
+--ldx]]--
+
+-- very experimental:
+
+function definers.applypostprocessors(tfmdata)
+ local postprocessors = tfmdata.postprocessors
+ if postprocessors then
+ local properties = tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash = postprocessors[i](tfmdata) -- after scaling etc
+ if type(extrahash) == "string" and extrahash ~= "" then
+ -- e.g. a reencoding needs this
+ extrahash = gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname = format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
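+
+-- A sketch of a postprocessor (made up): one that returns an extra hash string,
+-- which then becomes part of the fullname so that differently processed
+-- instances of the same font don't collide.
+--
+-- local function reencode(tfmdata)
+--     -- ... tweak tfmdata.characters here ...
+--     return "my reencoding" -- ends up as "...-my-reencoding" in properties.fullname
+-- end
+-- tfmdata.postprocessors = { reencode }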
+
+-- function definers.applypostprocessors(tfmdata)
+-- return tfmdata
+-- end
+
+local function checkembedding(tfmdata)
+ local properties = tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding = "full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding = "no"
+ else
+ embedding = "subset"
+ end
+ if properties then
+ properties.embedding = embedding
+ else
+ tfmdata.properties = { embedding = embedding }
+ end
+ tfmdata.embedding = embedding
+end
+
+function definers.loadfont(specification)
+ local hash = constructors.hashinstance(specification)
+ local tfmdata = loadedfonts[hash] -- hashes by size !
+ if not tfmdata then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ local reader = readers[lower(forced)] -- normally forced is already lowered
+ tfmdata = reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence = readers.sequence -- can be overloaded so only a shortcut here
+ for s=1,#sequence do
+ local reader = sequence[s]
+ if readers[reader] then -- we skip not loaded readers
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata = readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename = nil
+ end
+ end
+ end
+ end
+ if tfmdata then
+ tfmdata = definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata) -- todo: general postprocessor
+ loadedfonts[hash] = tfmdata
+ designsizes[specification.hash] = tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
+
+function constructors.checkvirtualids()
+ -- dummy in plain version
+end
+
+function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
+ local specification = definers.analyze(name,size)
+ local method = specification.method
+ if method and variants[method] then
+ specification = variants[method](specification)
+ end
+ specification = definers.resolve(specification)
+ local hash = constructors.hashinstance(specification)
+ local id = definers.registered(hash)
+ if not id then
+ local tfmdata = definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash = hash
+ constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference
+ id = font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id = 0 -- signal
+ end
+ end
+ return fontdata[id], id
+end
+
+--[[ldx--
+<p>So far the specifiers. Now comes the real definer. Here we cache
+based on id's, and we also intercept the virtual font handler. Since
+it evolved stepwise I may rewrite this bit (combine code).</p>
+
+<p>In the previously defined reader (the one resulting in a <l n='tfm'/>
+table) we cached the (scaled) instances. Here we cache them again, but
+this time based on id. We could combine this in one cache but this does
+not gain much. By the way, passing id's back in the callback was
+introduced later in the development.</p>
+--ldx]]--
+
+function definers.current() -- or maybe current
+ return lastdefined
+end
+
+function definers.registered(hash)
+ local id = internalized[hash]
+ return id, id and fontdata[id]
+end
+
+function definers.register(tfmdata,id)
+ if tfmdata and id then
+ local hash = tfmdata.properties.hash
+ if not hash then
+ report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
+ elseif not internalized[hash] then
+ internalized[hash] = id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id] = tfmdata
+ end
+ end
+end
+
+function definers.read(specification,size,id) -- id can be optional, name can already be table
+ statistics.starttiming(fonts)
+ if type(specification) == "string" then
+ specification = definers.analyze(specification,size)
+ end
+ local method = specification.method
+ if method and variants[method] then
+ specification = variants[method](specification)
+ end
+ specification = definers.resolve(specification)
+ local hash = constructors.hashinstance(specification)
+ local tfmdata = definers.registered(hash) -- id
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
+ end
+ else
+ tfmdata = definers.loadfont(specification) -- can be overloaded
+ if tfmdata then
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash = hash
+ if id then
+ definers.register(tfmdata,id)
+ end
+ else
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined = tfmdata or id -- todo ! ! ! ! !
+ if not tfmdata then -- or id?
+ report_defining( "unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata) == "table" then
+ local properties = tfmdata.properties or { }
+ local parameters = tfmdata.parameters or { }
+ report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format or "unknown", id, properties.name, parameters.size, properties.encodingbytes,
+ properties.encodingname, properties.fullname, file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+
+function font.getfont(id)
+ return fontdata[id] -- otherwise issues
+end
+
+--[[ldx--
+<p>We overload the <l n='tfm'/> reader.</p>
+--ldx]]--
+
+callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")
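+
+-- Roughly (a sketch; the exact syntax depends on the macro package and on the
+-- registered split symbols): a definition like
+--
+--   \font\test = file:lmroman10-regular at 10pt
+--
+-- reaches this callback as definers.read("file:lmroman10-regular", 655360) (an
+-- id may be passed as well) and returns the prepared tfmdata table, or reports
+-- an unknown font when nothing could be loaded.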
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-ini.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-ini.lua
new file mode 100644
index 00000000000..c547f89acf9
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-ini.lua
@@ -0,0 +1,32 @@
+if not modules then modules = { } end modules ['font-ini'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>Not much is happening here.</p>
+--ldx]]--
+
+local allocate = utilities.storage.allocate
+
+local report_defining = logs.reporter("fonts","defining")
+
+fonts = fonts or { }
+local fonts = fonts
+
+fonts.hashes = { identifiers = allocate() }
+
+fonts.tables = fonts.tables or { }
+fonts.helpers = fonts.helpers or { }
+fonts.tracers = fonts.tracers or { } -- for the moment till we have move to moduledata
+fonts.specifiers = fonts.specifiers or { } -- in format !
+
+fonts.analyzers = { } -- not needed here
+fonts.readers = { }
+fonts.definers = { methods = { } }
+fonts.loggers = { register = function() end }
+
+fontloader.totable = fontloader.to_table -- not used
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-map.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-map.lua
new file mode 100644
index 00000000000..b645d9aef2b
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-map.lua
@@ -0,0 +1,441 @@
+if not modules then modules = { } end modules ['font-map'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tonumber, next, type = tonumber, next, type
+
+local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
+local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
+local utfbyte = utf.byte
+local floor = math.floor
+local formatters = string.formatters
+
+local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
+local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_mapping = v end)
+
+local report_fonts = logs.reporter("fonts","loading") -- not otf only
+
+local fonts = fonts or { }
+local mappings = fonts.mappings or { }
+fonts.mappings = mappings
+
+local allocate = utilities.storage.allocate
+
+--[[ldx--
+<p>Eventually this code will disappear because map files are kind
+of obsolete. Some code may move to runtime or auxiliary modules.</p>
+<p>The name to unicode related code will stay of course.</p>
+--ldx]]--
+
+-- local function loadlumtable(filename) -- will move to font goodies
+-- local lumname = file.replacesuffix(file.basename(filename),"lum")
+-- local lumfile = resolvers.findfile(lumname,"map") or ""
+-- if lumfile ~= "" and lfs.isfile(lumfile) then
+-- if trace_loading or trace_mapping then
+-- report_fonts("loading map table %a",lumfile)
+-- end
+-- lumunic = dofile(lumfile)
+-- return lumunic, lumfile
+-- end
+-- end
+
+local hex = R("AF","09")
+----- hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
+----- hexsix = (hex*hex*hex*hex*hex*hex) / function(s) return tonumber(s,16) end
+local hexfour = (hex*hex*hex^-2) / function(s) return tonumber(s,16) end
+local hexsix = (hex*hex*hex^-4) / function(s) return tonumber(s,16) end
+local dec = (R("09")^1) / tonumber
+local period = P(".")
+local unicode = (P("uni") + P("UNI")) * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true)) -- base planes
+local ucode = (P("u") + P("U") ) * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true)) -- extended
+local index = P("index") * dec * Cc(false)
+
+local parser = unicode + ucode + index
+
+local parsers = { }
+
+local function makenameparser(str)
+ if not str or str == "" then
+ return parser
+ else
+ local p = parsers[str]
+ if not p then
+ p = P(str) * period * dec * Cc(false)
+ parsers[str] = p
+ end
+ return p
+ end
+end
+
+local f_single = formatters["%04X"]
+local f_double = formatters["%04X%04X"]
+
+local function tounicode16(unicode,name)
+ if unicode < 0x10000 then
+ return f_single(unicode)
+ elseif unicode < 0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+end
+
+local function tounicode16sequence(unicodes,name)
+ local t = { }
+ for l=1,#unicodes do
+ local u = unicodes[l]
+ if u < 0x10000 then
+ t[l] = f_single(u)
+ elseif u < 0x1FFFFFFFFF then -- u, not the (undefined) unicode; compare tounicode below
+ t[l] = f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+end
+
+local function tounicode(unicode,name)
+ if type(unicode) == "table" then
+ local t = { }
+ for l=1,#unicode do
+ local u = unicode[l]
+ if u < 0x10000 then
+ t[l] = f_single(u)
+ elseif u < 0x1FFFFFFFFF then
+ t[l] = f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+ else
+ if unicode < 0x10000 then
+ return f_single(unicode)
+ elseif unicode < 0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+end
+
+
+local function fromunicode16(str)
+ if #str == 4 then
+ return tonumber(str,16)
+ else
+ local l, r = match(str,"(....)(....)")
+ return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00
+ end
+end
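+
+-- A few concrete values (sketch) for the two helpers above:
+--
+-- tounicode16(0x00E9)       --> "00E9"
+-- tounicode16(0x1F600)      --> "007DDE00"
+-- fromunicode16("007DDE00") --> 0x1F600  (fromunicode16 is the exact inverse)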
+
+-- Slightly slower:
+--
+-- local p = C(4) * (C(4)^-1) / function(l,r)
+-- if r then
+-- return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00
+-- else
+-- return tonumber(l,16)
+-- end
+-- end
+--
+-- local function fromunicode16(str)
+-- return lpegmatch(p,str)
+-- end
+
+-- This is quite a bit faster, but at the cost of some memory, and if we
+-- do this we will also use it elsewhere, so let's not follow this route
+-- now. I might use this method in the plain variant (no caching there)
+-- but then I need a flag that distinguishes between code branches.
+--
+-- local cache = { }
+--
+-- function mappings.tounicode16(unicode)
+-- local s = cache[unicode]
+-- if not s then
+-- if unicode < 0x10000 then
+-- s = format("%04X",unicode)
+-- else
+-- s = format("%04X%04X",unicode/0x400+0xD800,unicode%0x400+0xDC00)
+-- end
+-- cache[unicode] = s
+-- end
+-- return s
+-- end
+
+mappings.makenameparser = makenameparser
+mappings.tounicode = tounicode
+mappings.tounicode16 = tounicode16
+mappings.tounicode16sequence = tounicode16sequence
+mappings.fromunicode16 = fromunicode16
+
+local ligseparator = P("_")
+local varseparator = P(".")
+local namesplitter = Ct(C((1 - ligseparator - varseparator)^1) * (ligseparator * C((1 - ligseparator - varseparator)^1))^0)
+
+-- maybe: ff fi fl ffi ffl => f_f f_i f_l f_f_i f_f_l
+
+-- local function test(name)
+-- local split = lpegmatch(namesplitter,name)
+-- print(string.formatters["%s: [% t]"](name,split))
+-- end
+
+-- test("i.f_")
+-- test("this")
+-- test("this.that")
+-- test("japan1.123")
+-- test("such_so_more")
+-- test("such_so_more.that")
+
+-- to be completed .. for fonts that use unicodes for ligatures, which
+-- is actually a bad thing and should be avoided in the first place
+
+local overloads = allocate {
+ IJ = { name = "I_J", unicode = { 0x49, 0x4A }, mess = 0x0132 },
+ ij = { name = "i_j", unicode = { 0x69, 0x6A }, mess = 0x0133 },
+ ff = { name = "f_f", unicode = { 0x66, 0x66 }, mess = 0xFB00 },
+ fi = { name = "f_i", unicode = { 0x66, 0x69 }, mess = 0xFB01 },
+ fl = { name = "f_l", unicode = { 0x66, 0x6C }, mess = 0xFB02 },
+ ffi = { name = "f_f_i", unicode = { 0x66, 0x66, 0x69 }, mess = 0xFB03 },
+ ffl = { name = "f_f_l", unicode = { 0x66, 0x66, 0x6C }, mess = 0xFB04 },
+ fj = { name = "f_j", unicode = { 0x66, 0x6A } },
+ fk = { name = "f_k", unicode = { 0x66, 0x6B } },
+}
+
+for k, v in next, overloads do
+ local name = v.name
+ local mess = v.mess
+ if name then
+ overloads[name] = v
+ end
+ if mess then
+ overloads[mess] = v
+ end
+end
+
+mappings.overloads = overloads
+
+function mappings.addtounicode(data,filename,checklookups)
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ if not unicodes then
+ return
+ end
+ local properties = data.properties
+ local descriptions = data.descriptions
+ -- we need to move this code
+ unicodes['space'] = unicodes['space'] or 32
+ unicodes['hyphen'] = unicodes['hyphen'] or 45
+ unicodes['zwj'] = unicodes['zwj'] or 0x200D
+ unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
+ --
+ local private = fonts.constructors and fonts.constructors.privateoffset or 0xF0000 -- 0x10FFFF
+ local unicodevector = fonts.encodings.agl.unicodes or { } -- loaded runtime in context
+ local contextvector = fonts.encodings.agl.ctxcodes or { } -- loaded runtime in context
+ local missing = { }
+ local nofmissing = 0
+ local oparser = nil
+ local cidnames = nil
+ local cidcodes = nil
+ local cidinfo = properties.cidinfo
+ local usedmap = cidinfo and fonts.cid.getmap(cidinfo)
+ local uparser = makenameparser() -- hm, every time?
+ if usedmap then
+ oparser = usedmap and makenameparser(cidinfo.ordering)
+ cidnames = usedmap.names
+ cidcodes = usedmap.unicodes
+ end
+ local ns = 0
+ local nl = 0
+ --
+ for unic, glyph in next, descriptions do
+ local name = glyph.name
+ if name then
+ local index = glyph.index
+ local r = overloads[name]
+ if r then
+ -- get rid of weird ligatures
+ -- glyph.name = r.name
+ glyph.unicode = r.unicode
+ elseif not unic or unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
+ local unicode = unicodevector[name] or contextvector[name]
+ if unicode then
+ glyph.unicode = unicode
+ ns = ns + 1
+ end
+ -- cidmap heuristics, beware, there is no guarantee for a match unless
+ -- the chain resolves
+ if (not unicode) and usedmap then
+ local foundindex = lpegmatch(oparser,name)
+ if foundindex then
+ unicode = cidcodes[foundindex] -- name to number
+ if unicode then
+ glyph.unicode = unicode
+ ns = ns + 1
+ else
+ local reference = cidnames[foundindex] -- number to name
+ if reference then
+ local foundindex = lpegmatch(oparser,reference)
+ if foundindex then
+ unicode = cidcodes[foundindex]
+ if unicode then
+ glyph.unicode = unicode
+ ns = ns + 1
+ end
+ end
+ if not unicode or unicode == "" then
+ local foundcodes, multiple = lpegmatch(uparser,reference)
+ if foundcodes then
+ glyph.unicode = foundcodes
+ if multiple then
+ nl = nl + 1
+ unicode = true
+ else
+ ns = ns + 1
+ unicode = foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ -- a.whatever or a_b_c.whatever or a_b_c (no numbers) a.b_
+ --
+ -- It is not trivial to find a solution that suits all fonts. We tried several alternatives
+ -- and this one seems to work reasonably well, also with fonts that use less standardized naming
+ -- schemes. The extra private test is tested by KE and seems to work okay with non-typical
+ -- fonts as well.
+ --
+ if not unicode or unicode == "" then
+ local split = lpegmatch(namesplitter,name)
+ local nsplit = split and #split or 0 -- add if
+ if nsplit == 0 then
+ -- skip
+ elseif nsplit == 1 then
+ local base = split[1]
+ local u = unicodes[base] or unicodevector[base] or contextvector[name]
+ if not u then
+ -- skip
+ elseif type(u) == "table" then
+ -- unlikely
+ if u[1] < private then
+ unicode = u
+ glyph.unicode = unicode
+ end
+ elseif u < private then
+ unicode = u
+ glyph.unicode = unicode
+ end
+ else
+ local t, n = { }, 0
+ for l=1,nsplit do
+ local base = split[l]
+ local u = unicodes[base] or unicodevector[base] or contextvector[name]
+ if not u then
+ break
+ elseif type(u) == "table" then
+ if u[1] >= private then
+ break
+ end
+ n = n + 1
+ t[n] = u[1]
+ else
+ if u >= private then
+ break
+ end
+ n = n + 1
+ t[n] = u
+ end
+ end
+ if n > 0 then
+ if n == 1 then
+ unicode = t[1]
+ else
+ unicode = t
+ end
+ glyph.unicode = unicode
+ end
+ end
+ nl = nl + 1
+ end
+ -- last resort (we might need to catch private here as well)
+ if not unicode or unicode == "" then
+ local foundcodes, multiple = lpegmatch(uparser,name)
+ if foundcodes then
+ glyph.unicode = foundcodes
+ if multiple then
+ nl = nl + 1
+ unicode = true
+ else
+ ns = ns + 1
+ unicode = foundcodes
+ end
+ end
+ end
+ -- check using substitutes and alternates
+ local r = overloads[unicode]
+ if r then
+ unicode = r.unicode
+ glyph.unicode = unicode
+ end
+ --
+ if not unicode then
+ missing[unic] = true
+ nofmissing = nofmissing + 1
+ end
+ end
+ else
+ -- no name
+ end
+ end
+ if type(checklookups) == "function" then
+ checklookups(data,missing,nofmissing)
+ end
+ -- todo: go lowercase
+ if trace_mapping then
+ for unic, glyph in table.sortedhash(descriptions) do
+ local name = glyph.name
+ local index = glyph.index
+ local unicode = glyph.unicode
+ if unicode then
+ if type(unicode) == "table" then
+ local unicodes = { }
+ for i=1,#unicode do
+ unicodes[i] = formatters("%U",unicode[i])
+ end
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode)
+ end
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns > 0 or nl > 0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
+end
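+
+-- A sketch of the heuristics above on some made-up glyph names (they only apply
+-- to glyphs that lack a usable unicode slot): "uni0041" resolves through the
+-- name parser to 0x41, an agl name like "eacute" through the agl vector to
+-- 0xE9, and a ligature-like name such as "T_h.liga" through the name splitter
+-- to the sequence { 0x54, 0x68 } (provided "T" and "h" are known names).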
+
+-- local parser = makenameparser("Japan1")
+-- local parser = makenameparser()
+-- local function test(str)
+-- local b, a = lpegmatch(parser,str)
+-- print((a and table.serialize(b)) or b)
+-- end
+-- test("a.sc")
+-- test("a")
+-- test("uni1234")
+-- test("uni1234.xx")
+-- test("uni12349876")
+-- test("u123400987600")
+-- test("index1234")
+-- test("Japan1.123")
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otb.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otb.lua
new file mode 100644
index 00000000000..c9f5d4aca9b
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otb.lua
@@ -0,0 +1,707 @@
+if not modules then modules = { } end modules ['font-otb'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+local concat = table.concat
+local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring, rawget = type, next, tonumber, tostring, rawget
+local lpegmatch = lpeg.match
+local utfchar = utf.char
+
+local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
+local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
+local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
+local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
+
+local report_prepare = logs.reporter("fonts","otf prepare")
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local otffeatures = otf.features
+local registerotffeature = otffeatures.register
+
+otf.defaultbasealternate = "none" -- first last
+
+local wildcard = "*"
+local default = "dflt"
+
+local formatters = string.formatters
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(descriptions,n)
+ if type(n) == "number" then
+ local name = descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam, j = { }, { }, 0
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- first is likely a key
+ j = j + 1
+ local di = descriptions[ni]
+ num[j] = f_unicode(ni)
+ nam[j] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
+end
+
+local function cref(feature,lookuptags,lookupname)
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
+ else
+ return formatters["feature %a"](feature)
+ end
+end
+
+local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+
+local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+
+local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+
+local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+
+local basemethods = { }
+local basemethod = "<unset>"
+
+local function applybasemethod(what,...)
+ local m = basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
+end
+
+-- We need to make sure that luatex sees the difference between
+-- base fonts that have different glyphs in the same slots but share
+-- the same fullname (or filename). LuaTeX will merge fonts
+-- eventually (and subset later on). If needed we can use a more
+-- verbose name as long as we don't use <()<>[]{}/%> and the length
+-- is < 128.
+
+local basehash, basehashes, applied = { }, 1, { }
+
+local function registerbasehash(tfmdata)
+ local properties = tfmdata.properties
+ local hash = concat(applied," ")
+ local base = basehash[hash]
+ if not base then
+ basehashes = basehashes + 1
+ base = basehashes
+ basehash[hash] = base
+ end
+ properties.basehash = base
+ properties.fullname = properties.fullname .. "-" .. base
+ -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash)
+ applied = { }
+end
+
+local function registerbasefeature(feature,value)
+ applied[#applied+1] = feature .. "=" .. tostring(value)
+end
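+
+-- Sketch: after initialization the applied list might be { "liga=true",
+-- "kern=true" }, which maps to a small number in basehash, and the fullname of
+-- the instance gets "-<number>" appended so that LuaTeX never merges base fonts
+-- that were prepared with different feature sets.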
+
+-- The original basemode ligature builder used the names of components
+-- and did some expression juggling to get the chain right. The current
+-- variant starts with unicodes but still uses names to make the chain.
+-- This is needed because we have to create intermediates when needed
+-- but use predefined snippets when available. To some extent the
+-- current builder is more stupid, but I don't worry that much about it
+-- as ligatures are rather predictable.
+--
+-- Personally I think that an ff + i == ffi rule as used in for instance
+-- latin modern is pretty weird as no sane person will key that in and
+-- expect a glyph for that ligature plus the following character. Anyhow,
+-- as we need to deal with this, we do, but no guarantees are given.
+--
+-- latin modern dejavu
+--
+-- f+f 102 102 102 102
+-- f+i 102 105 102 105
+-- f+l 102 108 102 108
+-- f+f+i 102 102 105
+-- f+f+l 102 102 108 102 102 108
+-- ff+i 64256 105 64256 105
+-- ff+l 64256 108
+--
+-- As you can see here, latin modern is less complete than dejavu but
+-- in practice one will not notice it.
+--
+-- The while loop is needed because we need to resolve for instance
+-- pseudo names like hyphen_hyphen to endash so in practice we end
+-- up with a bit too many definitions but the overhead is negligible.
+--
+-- We can have changed[first] or changed[second] but it quickly becomes
+-- messy if we need to take that into account.
+
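+-- A sketch of the shape this produces (slot numbers are made up): for an
+-- f + f + i ligature, once the f + f step is known, the builder chains
+--
+--   characters[0x66].ligatures[0x66]       = { char = <ff slot> }
+--   characters[<ff slot>].ligatures[0x69]  = { char = <ffi slot> }
+--
+-- and when a needed intermediate glyph does not exist in the font, a private
+-- slot is allocated for it.
+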
+local trace = false
+
+local function finalize_ligatures(tfmdata,ligatures)
+ local nofligatures = #ligatures
+ if nofligatures > 0 then
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local unicodes = resources.unicodes -- we use rawget in order to avoid building the table
+ local private = resources.private
+ local alldone = false
+ while not alldone do
+ local done = 0
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ if ligature then
+ local unicode, lookupdata = ligature[1], ligature[2]
+ if trace_ligatures_detail then
+ report_prepare("building % a into %a",lookupdata,unicode)
+ end
+ local size = #lookupdata
+ local firstcode = lookupdata[1] -- [2]
+ local firstdata = characters[firstcode]
+ local okay = false
+ if firstdata then
+ local firstname = "ctx_" .. firstcode
+ for i=1,size-1 do -- for i=2,size-1 do
+ local firstdata = characters[firstcode]
+ if not firstdata then
+ firstcode = private
+ if trace_ligatures_detail then
+ report_prepare("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname] = firstcode
+ firstdata = { intermediate = true, ligatures = { } }
+ characters[firstcode] = firstdata
+ descriptions[firstcode] = { name = firstname }
+ private = private + 1
+ end
+ local target
+ local secondcode = lookupdata[i+1]
+ local secondname = firstname .. "_" .. secondcode
+ if i == size - 1 then
+ target = unicode
+ if not rawget(unicodes,secondname) then
+ unicodes[secondname] = unicode -- map final ligature onto intermediates
+ end
+ okay = true
+ else
+ target = rawget(unicodes,secondname)
+ if not target then
+ break
+ end
+ end
+ if trace_ligatures_detail then
+ report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs = firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode] = { char = target }
+ else
+ firstdata.ligatures = { [secondcode] = { char = target } }
+ end
+ firstcode = target
+ firstname = secondname
+ end
+ elseif trace_ligatures_detail then
+ report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
+ end
+ if okay then
+ ligatures[i] = false
+ done = done + 1
+ end
+ end
+ end
+ alldone = done == 0
+ end
+ if trace_ligatures_detail then
+ for k, v in table.sortedhash(characters) do
+ if v.ligatures then
+ table.print(v,k)
+ end
+ end
+ end
+ resources.private = private
+ return true
+ end
+end
+
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local properties = tfmdata.properties
+ local changed = tfmdata.changed
+ local lookuphash = resources.lookuphash
+ local lookuptypes = resources.lookuptypes
+ local lookuptags = resources.lookuptags
+
+ local ligatures = { }
+ local alternate = tonumber(value) or true and 1
+ local defaultalt = otf.defaultbasealternate
+
+ local trace_singles = trace_baseinit and trace_singles
+ local trace_alternatives = trace_baseinit and trace_alternatives
+ local trace_ligatures = trace_baseinit and trace_ligatures
+
+ local actions = {
+ substitution = function(lookupdata,lookuptags,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode] = lookupdata
+ end,
+ alternate = function(lookupdata,lookuptags,lookupname,description,unicode)
+ local replacement = lookupdata[alternate]
+ if replacement then
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt == "first" then
+ replacement = lookupdata[1]
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt == "last" then
+ replacement = lookupdata[#lookupdata] -- take the last alternate
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature = function(lookupdata,lookuptags,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1] = { unicode, lookupdata }
+ end,
+ }
+
+ for unicode, character in next, characters do
+ local description = descriptions[unicode]
+ local lookups = description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookupdata = lookups[lookupname]
+ if lookupdata then
+ local lookuptype = lookuptypes[lookupname]
+ local action = actions[lookuptype]
+ if action then
+ action(lookupdata,lookuptags,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups = description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookuplist = lookups[lookupname]
+ if lookuplist then
+ local lookuptype = lookuptypes[lookupname]
+ local action = actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookuptags,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ properties.hasligatures = finalize_ligatures(tfmdata,ligatures)
+end
+
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local properties = tfmdata.properties
+ local lookuptags = resources.lookuptags
+ local sharedkerns = { }
+ local traceindeed = trace_baseinit and trace_kerns
+ local haskerns = false
+ for unicode, character in next, characters do
+ local description = descriptions[unicode]
+ local rawkerns = description.kerns -- shared
+ if rawkerns then
+ local s = sharedkerns[rawkerns]
+ if s == false then
+ -- skip
+ elseif s then
+ character.kerns = s
+ else
+ local newkerns = character.kerns
+ local done = false
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local kerns = rawkerns[lookup]
+ if kerns then
+ for otherunicode, value in next, kerns do
+ if value == 0 then
+ -- maybe no 0 test here
+ elseif not newkerns then
+ newkerns = { [otherunicode] = value }
+ done = true
+ if traceindeed then
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then -- first wins
+ newkerns[otherunicode] = value
+ done = true
+ if traceindeed then
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns] = newkerns
+ character.kerns = newkerns -- no empty assignments
+ haskerns = true
+ else
+ sharedkerns[rawkerns] = false
+ end
+ end
+ end
+ end
+ properties.haskerns = haskerns
+end
+
+basemethods.independent = {
+ preparesubstitutions = preparesubstitutions,
+ preparepositionings = preparepositionings,
+}
+
+local function makefake(tfmdata,name,present)
+ local resources = tfmdata.resources
+ local private = resources.private
+ local character = { intermediate = true, ligatures = { } }
+ resources.unicodes[name] = private
+ tfmdata.characters[private] = character
+ tfmdata.descriptions[private] = { name = name }
+ resources.private = private + 1
+ present[name] = private
+ return character
+end
+
+local function make_1(present,tree,name)
+ for k, v in next, tree do
+ if k == "ligature" then
+ present[name] = v
+ else
+ make_1(present,v,name .. "_" .. k)
+ end
+ end
+end
+
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
+ for k, v in next, tree do
+ if k == "ligature" then
+ local character = characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
+ end
+ character = makefake(tfmdata,name,present)
+ end
+ local ligatures = character.ligatures
+ if ligatures then
+ ligatures[unicode] = { char = v }
+ else
+ character.ligatures = { [unicode] = { char = v } }
+ end
+ if done then
+ local d = done[lookupname]
+ if not d then
+ done[lookupname] = { "dummy", v }
+ else
+ d[#d+1] = v
+ end
+ end
+ else
+ local code = present[name] or unicode
+ local name = name .. "_" .. k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
+ end
+ end
+end
+
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local changed = tfmdata.changed
+ local lookuphash = resources.lookuphash
+ local lookuptypes = resources.lookuptypes
+ local lookuptags = resources.lookuptags
+
+ local ligatures = { }
+ local alternate = tonumber(value) or true and 1
+ local defaultalt = otf.defaultbasealternate
+
+ local trace_singles = trace_baseinit and trace_singles
+ local trace_alternatives = trace_baseinit and trace_alternatives
+ local trace_ligatures = trace_baseinit and trace_ligatures
+
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookupdata = lookuphash[lookupname]
+ local lookuptype = lookuptypes[lookupname]
+ for unicode, data in next, lookupdata do
+ if lookuptype == "substitution" then
+ if trace_singles then
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode] = data
+ elseif lookuptype == "alternate" then
+ local replacement = data[alternate]
+ if replacement then
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt == "first" then
+ replacement = data[1]
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt == "last" then
+ replacement = data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype == "ligature" then
+ ligatures[#ligatures+1] = { unicode, data, lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+
+ local nofligatures = #ligatures
+
+ if nofligatures > 0 then
+
+ local characters = tfmdata.characters
+ local present = { }
+ local done = trace_baseinit and trace_ligatures and { }
+
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ local unicode, tree = ligature[1], ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
+ end
+
+ end
+
+end
+
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local properties = tfmdata.properties
+ local lookuphash = resources.lookuphash
+ local lookuptags = resources.lookuptags
+ local traceindeed = trace_baseinit and trace_kerns
+ -- check out this sharedkerns trickery
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookupdata = lookuphash[lookupname]
+ for unicode, data in next, lookupdata do
+ local character = characters[unicode]
+ local kerns = character.kerns
+ if not kerns then
+ kerns = { }
+ character.kerns = kerns
+ end
+ if traceindeed then
+ for otherunicode, kern in next, data do
+ if not kerns[otherunicode] and kern ~= 0 then
+ kerns[otherunicode] = kern
+ report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode, kern in next, data do
+ if not kerns[otherunicode] and kern ~= 0 then
+ kerns[otherunicode] = kern
+ end
+ end
+ end
+ end
+ end
+
+end
+
+local function initializehashes(tfmdata)
+ nodeinitializers.features(tfmdata)
+end
+
+basemethods.shared = {
+ initializehashes = initializehashes,
+ preparesubstitutions = preparesubstitutions,
+ preparepositionings = preparepositionings,
+}
+
+basemethod = "independent"
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ local starttime = trace_preparing and os.clock()
+ local features = tfmdata.shared.features
+ local fullname = tfmdata.properties.fullname or "?"
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups = otf.collectlookups
+ local rawdata = tfmdata.shared.rawdata
+ local properties = tfmdata.properties
+ local script = properties.script -- or "dflt" -- can be nil
+ local language = properties.language -- or "dflt" -- can be nil
+ local basesubstitutions = rawdata.resources.features.gsub
+ local basepositionings = rawdata.resources.features.gpos
+ --
+ -- if basesubstitutions then
+ -- for feature, data in next, basesubstitutions do
+ -- local value = features[feature]
+ -- if value then
+ -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ -- if validlookups then
+ -- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ -- registerbasefeature(feature,value)
+ -- end
+ -- end
+ -- end
+ -- end
+ -- if basepositionings then
+ -- for feature, data in next, basepositionings do
+ -- local value = features[feature]
+ -- if value then
+ -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ -- if validlookups then
+ -- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ -- registerbasefeature(feature,value)
+ -- end
+ -- end
+ -- end
+ -- end
+ --
+ if basesubstitutions or basepositionings then
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local sfeatures = sequence.features
+ if sfeatures then
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local feature = order[i]
+ local value = features[feature]
+ if value then
+ local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ -- skip
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ --
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1)
+ base = featuresinitializer,
+ }
+}
+
+-- independent : collect lookups independently (takes more runtime ... negligible)
+-- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
+
+directives.register("fonts.otf.loader.basemethod", function(v)
+ if basemethods[v] then
+ basemethod = v
+ end
+end)
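+
+-- A minimal usage sketch (the directives.enable call is an assumption based
+-- on the usual ConTeXt setters interface; generic macro packages typically
+-- set such directives from their configuration instead):
+--
+--   directives.enable("fonts.otf.loader.basemethod=shared")
+--
+-- which invokes the function registered above with v == "shared" and makes
+-- the base mode preparation reuse the node mode lookups.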
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otf.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otf.lua
new file mode 100644
index 00000000000..f709e70006a
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otf.lua
@@ -0,0 +1,3052 @@
+if not modules then modules = { } end modules ['font-otf'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- langs -> languages etc.
+-- anchor_classes vs kernclasses
+-- modification/creationtime in subfont is runtime so pointless
+-- to_table -> totable
+-- ascent descent
+
+-- to be checked: combinations like:
+--
+-- current="ABCD" with [A]=nothing, [BC]=ligature, [D]=single (applied to result of BC so funny index)
+--
+-- unlikely but possible
+
+-- more checking against low level calls of functions
+
+local utfbyte = utf.byte
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local abs = math.abs
+local reversed, concat, insert, remove, sortedkeys = table.reversed, table.concat, table.insert, table.remove, table.sortedkeys
+local ioflush = io.flush
+local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
+local formatters = string.formatters
+local P, R, S, C, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.match
+
+local setmetatableindex = table.setmetatableindex
+local allocate = utilities.storage.allocate
+local registertracker = trackers.register
+local registerdirective = directives.register
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local elapsedtime = statistics.elapsedtime
+local findbinfile = resolvers.findbinfile
+
+local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
+local trace_subfonts = false registertracker("otf.subfonts", function(v) trace_subfonts = v end)
+local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
+local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
+local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
+local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
+local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
+
+local compact_lookups = true registertracker("otf.compactlookups", function(v) compact_lookups = v end)
+local purge_names = true registertracker("otf.purgenames", function(v) purge_names = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+otf.glists = { "gsub", "gpos" }
+
+otf.version = 2.819 -- beware: also sync font-mis.lua and in mtx-fonts
+otf.cache = containers.define("fonts", "otf", otf.version, true)
+
+local hashes = fonts.hashes
+local definers = fonts.definers
+local readers = fonts.readers
+local constructors = fonts.constructors
+
+local fontdata = hashes and hashes.identifiers
+local chardata = characters and characters.data -- not used
+
+local otffeatures = constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local enhancers = allocate()
+otf.enhancers = enhancers
+local patches = { }
+enhancers.patches = patches
+
+local forceload = false
+local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
+local packdata = true
+local syncspace = true
+local forcenotdef = false
+local includesubfonts = false
+local overloadkerns = false -- experiment
+
+local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
+
+local wildcard = "*"
+local default = "dflt"
+
+local fontloader = fontloader
+local open_font = fontloader.open
+local close_font = fontloader.close
+local font_fields = fontloader.fields
+local apply_featurefile = fontloader.apply_featurefile
+
+local mainfields = nil
+local glyphfields = nil -- not used yet
+
+local formats = fonts.formats
+
+formats.otf = "opentype"
+formats.ttf = "truetype"
+formats.ttc = "truetype"
+formats.dfont = "truetype"
+
+registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
+registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
+registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
+registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
+registerdirective("fonts.otf.loader.overloadkerns", function(v) overloadkerns = v end)
+-----------------("fonts.otf.loader.alldimensions", function(v) alldimensions = v end)
+
+function otf.fileformat(filename)
+ local leader = lower(io.loadchunk(filename,4))
+ local suffix = lower(file.suffix(filename))
+ if leader == "otto" then
+ return formats.otf, suffix == "otf"
+ elseif leader == "ttcf" then
+ return formats.ttc, suffix == "ttc"
+ -- elseif leader == "true" then
+ -- return formats.ttf, suffix == "ttf"
+ elseif suffix == "ttc" then
+ return formats.ttc, true
+ elseif suffix == "dfont" then
+ return formats.dfont, true
+ else
+ return formats.ttf, suffix == "ttf"
+ end
+end
+
+-- local function otf_format(filename)
+-- -- return formats[lower(file.suffix(filename))]
+-- end
+
+local function otf_format(filename)
+ local format, okay = otf.fileformat(filename)
+ if not okay then
+ report_otf("font %a is actually an %a file",filename,format)
+ end
+ return format
+end
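+
+-- Illustrative sketch of the detection above (file name hypothetical):
+--
+--   local format, matches = otf.fileformat("somefont.otf")
+--   -- format  : "opentype" when the file starts with the OTTO tag,
+--   --           "truetype" for ttf/ttc/dfont style files
+--   -- matches : true when the suffix agrees with the detected format,
+--   --           otherwise otf_format reports the mismatch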
+
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile ~= "" then
+ if trace_loading then
+ report_otf("using featurefile %a", featurefile)
+ end
+ apply_featurefile(raw, featurefile)
+ end
+end
+
+local function showfeatureorder(rawdata,filename)
+ local sequences = rawdata.resources.sequences
+ if sequences and #sequences > 0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence = sequences[nos]
+ local typ = sequence.type or "no-type"
+ local name = sequence.name or "no-name"
+ local subtables = sequence.subtables or { "no-subtables" }
+ local features = sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature, scripts in next, features do
+ local tt = { }
+ if type(scripts) == "table" then
+ for script, languages in next, scripts do
+ local ttt = { }
+ for language, _ in next, languages do
+ ttt[#ttt+1] = language
+ end
+ tt[#tt+1] = formatters["[%s: % t]"](script,ttt)
+ end
+ if trace_loading then
+ report_otf(" %s: % t",feature,tt)
+ end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+
+--[[ldx--
+<p>We start with a lot of tables and related functions.</p>
+--ldx]]--
+
+local valid_fields = table.tohash {
+ -- "anchor_classes",
+ "ascent",
+ -- "cache_version",
+ "cidinfo",
+ "copyright",
+ -- "creationtime",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ -- "glyphs",
+ "hasvmetrics",
+ -- "head_optimized_for_cleartype",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ -- "kerns",
+ -- "lookups",
+ "macstyle",
+ -- "modificationtime",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ -- "private",
+ "serifcheck",
+ "sfd_version",
+ -- "size",
+ "strokedfont",
+ "strokewidth",
+ -- "subfonts",
+ "table_version",
+ -- "tables",
+ -- "ttf_tab_saved",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "validation_state",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
+ -- "xuid",
+}
+
+local ordered_enhancers = {
+ "prepare tables",
+
+ "prepare glyphs",
+ "prepare lookups",
+
+ "analyze glyphs",
+ "analyze math",
+
+ -- "prepare tounicode",
+
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+
+ "merge kern classes",
+
+ "reorganize features",
+ "reorganize subtables",
+
+ "check glyphs",
+ "check metadata",
+-- "check extra features", -- after metadata
+
+ "prepare tounicode",
+
+ "check encoding", -- moved
+ "add duplicates",
+
+ "expand lookups", -- a temp hack awaiting the lua loader
+
+-- "check extra features", -- after metadata and duplicates
+
+ "cleanup tables",
+
+ "compact lookups",
+ "purge names",
+}
+
+--[[ldx--
+<p>Here we go.</p>
+--ldx]]--
+
+local actions = allocate()
+local before = allocate()
+local after = allocate()
+
+patches.before = before
+patches.after = after
+
+local function enhance(name,data,filename,raw)
+ local enhancer = actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ -- no message as we can have private ones
+ end
+end
+
+function enhancers.apply(data,filename,raw)
+ local basename = file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush() -- we want instant messages
+ for e=1,#ordered_enhancers do
+ local enhancer = ordered_enhancers[e]
+ local b = before[enhancer]
+ if b then
+ for pattern, action in next, b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a = after[enhancer]
+ if a then
+ for pattern, action in next, a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush() -- we want instant messages
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush() -- we want instant messages
+end
+
+-- patches.register("before","migrate metadata","cambria",function() end)
+
+function patches.register(what,where,pattern,action)
+ local pw = patches[what]
+ if pw then
+ local ww = pw[where]
+ if ww then
+ ww[pattern] = action
+ else
+ pw[where] = { [pattern] = action}
+ end
+ end
+end
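+
+-- A sketch of a (hypothetical) patch in the spirit of the commented example
+-- above: run an action before the "check metadata" enhancer for every font
+-- whose lowercased basename matches "lmroman":
+--
+--   otf.enhancers.patches.register("before","check metadata","lmroman",
+--       function(data,filename,raw)
+--           patches.report("patching %a",filename)
+--       end)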
+
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+
+function enhancers.register(what,action) -- only already registered can be overloaded
+ actions[what] = action
+end
+
+function otf.load(filename,sub,featurefile) -- second argument (format) is gone !
+ local base = file.basename(file.removesuffix(filename))
+ local name = file.removesuffix(base)
+ local attr = lfs.attributes(filename)
+ local size = attr and attr.size or 0
+ local time = attr and attr.modification or 0
+ if featurefile then
+ name = name .. "@" .. file.removesuffix(file.basename(featurefile))
+ end
+ -- or: sub = tonumber(sub)
+ if sub == "" then
+ sub = false
+ end
+ local hash = name
+ if sub then
+ hash = hash .. "-" .. sub
+ end
+ hash = containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles = { }
+ for s in gmatch(featurefile,"[^,]+") do
+ local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name == "" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr = lfs.attributes(name)
+ featurefiles[#featurefiles+1] = {
+ name = name,
+ size = attr and attr.size or 0,
+ time = attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles == 0 then
+ featurefiles = nil
+ end
+ end
+ local data = containers.read(otf.cache,hash)
+ local reload = not data or data.size ~= size or data.time ~= time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload = true
+ end
+ if not reload then
+ local featuredata = data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata ~= #featurefiles then
+ reload = true
+ else
+ for i=1,#featurefiles do
+ local fi, fd = featurefiles[i], featuredata[i]
+ if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
+ reload = true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload = true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ starttiming("fontloader")
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata, messages
+ if sub then
+ fontdata, messages = open_font(filename,sub)
+ else
+ fontdata, messages = open_font(filename)
+ end
+ if fontdata then
+ mainfields = mainfields or (font_fields and font_fields(fontdata))
+ end
+ if trace_loading and messages and #messages > 0 then
+ if type(messages) == "string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
+ end
+ end
+ else
+ report_otf("loading done")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes = {
+ -- names to unicodes
+ }
+ local splitter = lpeg.splitter(" ",unicodes)
+ data = {
+ size = size,
+ time = time,
+ subfont = sub,
+ format = otf_format(filename),
+ featuredata = featurefiles,
+ resources = {
+ filename = resolvers.unresolve(filename), -- no shortcut
+ version = otf.version,
+ creator = "context mkiv",
+ unicodes = unicodes,
+ indices = {
+ -- index to unicodes
+ },
+ duplicates = {
+ -- alternative unicodes
+ },
+ variants = {
+ -- alternative unicodes (variants)
+ },
+ lookuptypes = {
+ },
+ },
+ warnings = {
+ },
+ metadata = {
+ -- raw metadata, not to be used
+ },
+ properties = {
+ -- normalized metadata
+ },
+ descriptions = {
+ },
+ goodies = {
+ },
+ helpers = { -- might go away
+ tounicodelist = splitter,
+ tounicodetable = Ct(splitter),
+ },
+ }
+ report_otf("file size: %s", size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime = { }
+ if packdata then
+ if cleanup > 0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data = containers.write(otf.cache, hash, data)
+ if cleanup > 1 then
+ collectgarbage("collect")
+ end
+ stoptiming("fontloader")
+ if elapsedtime then -- not in generic
+ report_otf("loading, optimizing, packing and caching time %s, pack time %s",
+ elapsedtime("fontloader"),packdata and elapsedtime(packtime) or 0)
+ end
+ close_font(fontdata) -- free memory
+ if cleanup > 3 then
+ collectgarbage("collect")
+ end
+ data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
+ if cleanup > 2 then
+ collectgarbage("collect")
+ end
+ else
+ stoptiming("fontloader")
+ data = nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ --
+ local resources = data.resources
+ local lookuptags = resources.lookuptags
+ local unicodes = resources.unicodes
+ if not lookuptags then
+ lookuptags = { }
+ resources.lookuptags = lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v = type(k) == "number" and ("lookup " .. k) or k
+ t[k] = v
+ return v
+ end)
+ if not unicodes then
+ unicodes = { }
+ resources.unicodes = unicodes
+ setmetatableindex(unicodes,function(t,k)
+ -- use rawget when no table has to be built
+ setmetatableindex(unicodes,nil)
+ for u, d in next, data.descriptions do
+ local n = d.name
+ if n then
+ t[n] = u
+ -- report_otf("accessing known name %a",k)
+ else
+ -- report_otf("accessing unknown name %a",k)
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes) -- do we really need this?
+ --
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
+ enhance("add dimensions",data,filename,nil,false)
+ enhance("check extra features",data,filename)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
+
+local mt = {
+ __index = function(t,k) -- maybe set it
+ if k == "height" then
+ local ht = t.boundingbox[4]
+ return ht < 0 and 0 or ht
+ elseif k == "depth" then
+ local dp = -t.boundingbox[2]
+ return dp < 0 and 0 or dp
+ elseif k == "width" then
+ return 0
+ elseif k == "name" then -- or maybe uni*
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+
+actions["prepare tables"] = function(data,filename,raw)
+ data.properties.hasitalics = false
+end
+
+actions["add dimensions"] = function(data,filename)
+ -- todo: forget about the width if it's the defaultwidth (saves mem)
+ -- we could also build the marks hash here (instead of storing it)
+ if data then
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local defaultwidth = resources.defaultwidth or 0
+ local defaultheight = resources.defaultheight or 0
+ local defaultdepth = resources.defaultdepth or 0
+ local basename = trace_markwidth and file.basename(filename)
+ for _, d in next, descriptions do
+ local bb, wd = d.boundingbox, d.width
+ if not wd then
+ -- or bb?
+ d.width = defaultwidth
+ elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ -- d.width = -wd
+ end
+ -- if forcenotdef and not d.name then
+ -- d.name = ".notdef"
+ -- end
+ if bb then
+ local ht = bb[4]
+ local dp = -bb[2]
+ -- if alldimensions then
+ -- if ht ~= 0 then
+ -- d.height = ht
+ -- end
+ -- if dp ~= 0 then
+ -- d.depth = dp
+ -- end
+ -- else
+ if ht == 0 or ht < 0 then
+ -- not set
+ else
+ d.height = ht
+ end
+ if dp == 0 or dp < 0 then
+ -- not set
+ else
+ d.depth = dp
+ end
+ -- end
+ end
+ end
+ end
+end
+
+local function somecopy(old) -- fast one
+ if old then
+ local new = { }
+ if type(old) == "table" then
+ for k, v in next, old do
+ if k == "glyphs" then
+ -- skip
+ elseif type(v) == "table" then
+ new[k] = somecopy(v)
+ else
+ new[k] = v
+ end
+ end
+ else
+ for i=1,#mainfields do
+ local k = mainfields[i]
+ local v = old[k]
+ if k == "glyphs" then
+ -- skip
+ elseif type(v) == "table" then
+ new[k] = somecopy(v)
+ else
+ new[k] = v
+ end
+ end
+ end
+ return new
+ else
+ return { }
+ end
+end
+
+-- not setting hasitalics and class (when nil) during table construction can save some mem
+
+actions["prepare glyphs"] = function(data,filename,raw)
+ local tableversion = tonumber(raw.table_version) or 0
+ local rawglyphs = raw.glyphs
+ local rawsubfonts = raw.subfonts
+ local rawcidinfo = raw.cidinfo
+ local criterium = constructors.privateoffset
+ local private = criterium
+ local resources = data.resources
+ local metadata = data.metadata
+ local properties = data.properties
+ local descriptions = data.descriptions
+ local unicodes = resources.unicodes -- name to unicode
+ local indices = resources.indices -- index to unicode
+ local duplicates = resources.duplicates
+ local variants = resources.variants
+
+ if rawsubfonts then
+
+ metadata.subfonts = includesubfonts and { }
+ properties.cidinfo = rawcidinfo
+
+ if rawcidinfo.registry then
+ local cidmap = fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname = cidmap.usedname
+ local nofnames = 0
+ local nofunicodes = 0
+ local cidunicodes = cidmap.unicodes
+ local cidnames = cidmap.names
+ local cidtotal = 0
+ local unique = trace_subfonts and { }
+ for cidindex=1,#rawsubfonts do
+ local subfont = rawsubfonts[cidindex]
+ local cidglyphs = subfont.glyphs
+ if includesubfonts then
+ metadata.subfonts[cidindex] = somecopy(subfont)
+ end
+ local cidcnt, cidmin, cidmax
+ if tableversion > 0.3 then
+ -- we have delayed loading so we cannot use next
+ cidcnt = subfont.glyphcnt
+ cidmin = subfont.glyphmin
+ cidmax = subfont.glyphmax
+ else
+ cidcnt = subfont.glyphcnt
+ cidmin = 0
+ cidmax = cidcnt - 1
+ end
+ if trace_subfonts then
+ local cidtot = cidmax - cidmin + 1
+ cidtotal = cidtotal + cidtot
+ report_otf("subfont: %i, min: %i, max: %i, cnt: %i, n: %i",cidindex,cidmin,cidmax,cidtot,cidcnt)
+ end
+ if cidcnt > 0 then
+ for cidslot=cidmin,cidmax do
+ local glyph = cidglyphs[cidslot]
+ if glyph then
+ local index = tableversion > 0.3 and glyph.orig_pos or cidslot
+ if trace_subfonts then
+ unique[index] = true
+ end
+ local unicode = glyph.unicode
+ if unicode >= 0x00E000 and unicode <= 0x00F8FF then
+ unicode = -1
+ elseif unicode >= 0x0F0000 and unicode <= 0x0FFFFD then
+ unicode = -1
+ elseif unicode >= 0x100000 and unicode <= 0x10FFFD then
+ unicode = -1
+ end
+ local name = glyph.name or cidnames[index]
+ if not unicode or unicode == -1 then -- or unicode >= criterium then
+ unicode = cidunicodes[index]
+ end
+ if unicode and descriptions[unicode] then
+ if trace_private then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ end
+ unicode = -1
+ end
+ if not unicode or unicode == -1 then -- or unicode >= criterium then
+ if not name then
+ name = formatters["u%06X.ctx"](private)
+ end
+ unicode = private
+ unicodes[name] = private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private = private + 1
+ nofnames = nofnames + 1
+ else
+ -- if unicode > criterium then
+ -- local taken = descriptions[unicode]
+ -- if taken then
+ -- private = private + 1
+ -- descriptions[private] = taken
+ -- unicodes[taken.name] = private
+ -- indices[taken.index] = private
+ -- if trace_private then
+ -- report_otf("slot %U is moved to %U due to private in font",unicode)
+ -- end
+ -- end
+ -- end
+ if not name then
+ name = formatters["u%06X.ctx"](unicode)
+ end
+ unicodes[name] = unicode
+ nofunicodes = nofunicodes + 1
+ end
+ indices[index] = unicode -- each index is unique (at least now)
+ local description = {
+ -- width = glyph.width,
+ boundingbox = glyph.boundingbox,
+ -- name = glyph.name or name or "unknown", -- uniXXXX
+ name = name or "unknown", -- uniXXXX
+ cidindex = cidindex,
+ index = cidslot,
+ glyph = glyph,
+ }
+ descriptions[unicode] = description
+ local altuni = glyph.altuni
+ if altuni then
+ -- local d
+ for i=1,#altuni do
+ local a = altuni[i]
+ local u = a.unicode
+ if u ~= unicode then
+ local v = a.variant
+ if v then
+ -- tricky: no addition to d? needs checking but in practice such dups are either very simple
+ -- shapes or e.g cjk with not that many features
+ local vv = variants[v]
+ if vv then
+ vv[u] = unicode
+ else -- xits-math has some:
+ vv = { [u] = unicode }
+ variants[v] = vv
+ end
+ -- elseif d then
+ -- d[#d+1] = u
+ -- else
+ -- d = { u }
+ end
+ end
+ end
+ -- if d then
+ -- duplicates[unicode] = d -- is this needed ?
+ -- end
+ end
+ end
+ end
+ else
+ report_otf("potential problem: no glyphs found in subfont %i",cidindex)
+ end
+ end
+ if trace_subfonts then
+ report_otf("nofglyphs: %i, unique: %i",cidtotal,table.count(unique))
+ end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
+ end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+
+ else
+
+ local cnt = raw.glyphcnt or 0
+ local min = tableversion > 0.3 and raw.glyphmin or 0
+ local max = tableversion > 0.3 and raw.glyphmax or (raw.glyphcnt - 1)
+ if cnt > 0 then
+-- for index=0,cnt-1 do
+ for index=min,max do
+ local glyph = rawglyphs[index]
+ if glyph then
+ local unicode = glyph.unicode
+ local name = glyph.name
+ if not unicode or unicode == -1 then -- or unicode >= criterium then
+ unicode = private
+ unicodes[name] = private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private = private + 1
+ else
+ -- We have a font that uses and exposes the private area. As this is rather unreliable it's
+                -- advised not to trust slots here (better use glyphnames). Anyway, we need a double check:
+ -- we need to move already moved entries and we also need to bump the next private to after
+ -- the (currently) last slot. This could leave us with a hole but we have holes anyway.
+ if unicode > criterium then
+ -- \definedfont[file:HANBatang-LVT.ttf] \fontchar{uF0135} \char"F0135
+ local taken = descriptions[unicode]
+ if taken then
+ if unicode >= private then
+ private = unicode + 1 -- restart private (so we can have mixed now)
+ else
+ private = private + 1 -- move on
+ end
+ descriptions[private] = taken
+ unicodes[taken.name] = private
+ indices[taken.index] = private
+ if trace_private then
+ report_otf("slot %U is moved to %U due to private in font",unicode,private)
+ end
+ else
+ if unicode >= private then
+ private = unicode + 1 -- restart (so we can have mixed now)
+ end
+ end
+ end
+ unicodes[name] = unicode
+ end
+ indices[index] = unicode
+ -- if not name then
+ -- name = formatters["u%06X"](unicode) -- u%06X.ctx
+ -- end
+ descriptions[unicode] = {
+ -- width = glyph.width,
+ boundingbox = glyph.boundingbox,
+ name = name,
+ index = index,
+ glyph = glyph,
+ }
+ local altuni = glyph.altuni
+ if altuni then
+ -- local d
+ for i=1,#altuni do
+ local a = altuni[i]
+ local u = a.unicode
+ if u ~= unicode then
+ local v = a.variant
+ if v then
+ -- tricky: no addition to d? needs checking but in practice such dups are either very simple
+ -- shapes or e.g cjk with not that many features
+ local vv = variants[v]
+ if vv then
+ vv[u] = unicode
+ else -- xits-math has some:
+ vv = { [u] = unicode }
+ variants[v] = vv
+ end
+ -- elseif d then
+ -- d[#d+1] = u
+ -- else
+ -- d = { u }
+ end
+ end
+ end
+ -- if d then
+ -- duplicates[unicode] = d -- is this needed ?
+ -- end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ else
+ report_otf("potential problem: no glyphs found")
+ end
+
+ end
+
+ resources.private = private
+
+end
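+
+-- After this step the normalized tables relate roughly as follows (a sketch
+-- with hypothetical values; the private offset comes from
+-- constructors.privateoffset):
+--
+--   resources.unicodes["f_i"] = 0xF0000   -- glyph name  -> unicode
+--   resources.indices [512]   = 0xF0000   -- glyph index -> unicode
+--   descriptions[0xF0000]     = { name = "f_i", index = 512, glyph = ... }
+--
+-- glyphs without a usable unicode are parked in the private area; the raw
+-- glyph subtable kept in each description is stripped again in "check glyphs".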
+
+-- the next one is still messy but will get better when we have
+-- flattened map/enc tables in the font loader
+
+-- the next one is not using a valid base for unicode privates
+--
+-- PsuedoEncodeUnencoded(EncMap *map,struct ttfinfo *info)
+
+actions["check encoding"] = function(data,filename,raw)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local properties = data.properties
+ local unicodes = resources.unicodes -- name to unicode
+ local indices = resources.indices -- index to unicodes
+ local duplicates = resources.duplicates
+
+ -- begin of messy (not needed when cidmap)
+
+ local mapdata = raw.map or { }
+ local unicodetoindex = mapdata and mapdata.map or { }
+ local indextounicode = mapdata and mapdata.backmap or { }
+ -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
+ local encname = lower(data.enc_name or mapdata.enc_name or "")
+ local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
+ local privateoffset = constructors.privateoffset
+
+ -- end of messy
+
+ if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
+ end
+ local reported = { }
+ -- we loop over the original unicode->index mapping but we
+ -- need to keep in mind that that one can have weird entries
+ -- so we need some extra checking
+ for maybeunicode, index in next, unicodetoindex do
+ if descriptions[maybeunicode] then
+ -- we ignore invalid unicodes (unicode = -1) (ff can map wrong to non private)
+ else
+ local unicode = indices[index]
+ if not unicode then
+ -- weird (cjk or so?)
+ elseif maybeunicode == unicode then
+ -- no need to add
+ elseif unicode > privateoffset then
+ -- we have a non-unicode
+ else
+ local d = descriptions[unicode]
+ if d then
+ local c = d.copies
+ if c then
+ c[maybeunicode] = true
+ else
+ d.copies = { [maybeunicode] = true }
+ end
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index] = true
+ end
+ end
+ end
+ end
+ for unicode, data in next, descriptions do
+ local d = data.copies
+ if d then
+ duplicates[unicode] = sortedkeys(d)
+ data.copies = nil
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+
+ if mapdata then
+ mapdata.map = { } -- clear some memory (virtual and created each time anyway)
+ mapdata.backmap = { } -- clear some memory (virtual and created each time anyway)
+ end
+end
+
+-- for the moment we assume that a font with lookups will not use
+-- altuni so we stick to kerns only .. alternatively we can always
+-- do an indirect lookup uni_to_uni . but then we need that in
+-- all lookups
+
+actions["add duplicates"] = function(data,filename,raw)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local properties = data.properties
+ local unicodes = resources.unicodes -- name to unicode
+ local indices = resources.indices -- index to unicodes
+ local duplicates = resources.duplicates
+ for unicode, d in next, duplicates do
+ local nofduplicates = #d
+ if nofduplicates > 4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ -- local validduplicates = { }
+ for i=1,nofduplicates do
+ local u = d[i]
+ if not descriptions[u] then
+ local description = descriptions[unicode]
+ local n = 0
+ for _, description in next, descriptions do
+ local kerns = description.kerns
+ if kerns then
+ for _, k in next, kerns do
+ local ku = k[unicode]
+ if ku then
+ k[u] = ku
+ n = n + 1
+ end
+ end
+ end
+ -- todo: lookups etc
+ end
+ if u > 0 then -- and
+ local duplicate = table.copy(description) -- else packing problem
+ duplicate.comment = formatters["copy of %U"](unicode)
+ descriptions[u] = duplicate
+ -- validduplicates[#validduplicates+1] = u
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+ -- duplicates[unicode] = #validduplicates > 0 and validduplicates or nil
+ end
+ end
+end
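+
+-- Sketch (hypothetical values): with duplicates[0x0066] = { 0xF0066 } and no
+-- description yet at 0xF0066, the description of U+0066 is copied to that
+-- slot and every kern table that contains k[0x0066] gains a matching
+-- k[0xF0066] entry, so lookups that end up at the duplicate still kern.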
+
+-- class : nil base mark ligature component (maybe we don't need it in description)
+-- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
+
+actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local metadata = data.metadata
+ local properties = data.properties
+ local hasitalics = false
+ local widths = { }
+ local marks = { } -- always present (saves checking)
+ for unicode, description in next, descriptions do
+ local glyph = description.glyph
+ local italic = glyph.italic_correction -- only in a math font (we also have vert/horiz)
+ if not italic then
+ -- skip
+ elseif italic == 0 then
+ -- skip
+ else
+ description.italic = italic
+ hasitalics = true
+ end
+ local width = glyph.width
+ widths[width] = (widths[width] or 0) + 1
+ local class = glyph.class
+ if class then
+ if class == "mark" then
+ marks[unicode] = true
+ end
+ description.class = class
+ end
+ end
+ -- flag italic
+ properties.hasitalics = hasitalics
+ -- flag marks
+ resources.marks = marks
+ -- share most common width for cjk fonts
+ local wd, most = 0, 1
+ for k,v in next, widths do
+ if v > most then
+ wd, most = k, v
+ end
+ end
+ if most > 1000 then -- maybe 500
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode, description in next, descriptions do
+ if description.width == wd then
+ -- description.width = nil
+ else
+ description.width = description.glyph.width
+ end
+ end
+ resources.defaultwidth = wd
+ else
+ for unicode, description in next, descriptions do
+ description.width = description.glyph.width
+ end
+ end
+end
+
+actions["reorganize mark classes"] = function(data,filename,raw)
+ local mark_classes = raw.mark_classes
+ if mark_classes then
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ local markclasses = { }
+ resources.markclasses = markclasses -- reversed
+ for name, class in next, mark_classes do
+ local t = { }
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]] = true
+ end
+ markclasses[name] = t
+ end
+ end
+end
+
+actions["reorganize features"] = function(data,filename,raw) -- combine with other
+ local features = { }
+ data.resources.features = features
+ for k=1,#otf.glists do
+ local what = otf.glists[k]
+ local dw = raw[what]
+ if dw then
+ local f = { }
+ features[what] = f
+ for i=1,#dw do
+ local d= dw[i]
+ local dfeatures = d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df = dfeatures[i]
+ local tag = strip(lower(df.tag))
+ local ft = f[tag]
+ if not ft then
+ ft = { }
+ f[tag] = ft
+ end
+ local dscripts = df.scripts
+ for i=1,#dscripts do
+ local d = dscripts[i]
+ local languages = d.langs
+ local script = strip(lower(d.script))
+ local fts = ft[script] if not fts then fts = {} ft[script] = fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))] = true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
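+
+-- The resulting layout, as a sketch (feature, script and language are
+-- hypothetical):
+--
+--   data.resources.features.gsub["liga"]["latn"]["dflt"] = true
+--
+-- i.e. per gsub/gpos a hash of feature tags, each mapping scripts to the set
+-- of languages for which that feature is present.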
+
+actions["reorganize anchor classes"] = function(data,filename,raw)
+ local resources = data.resources
+ local anchor_to_lookup = { }
+ local lookup_to_anchor = { }
+ resources.anchor_to_lookup = anchor_to_lookup
+ resources.lookup_to_anchor = lookup_to_anchor
+ local classes = raw.anchor_classes -- anchor classes not in final table
+ if classes then
+ for c=1,#classes do
+ local class = classes[c]
+ local anchor = class.name
+ local lookups = class.lookup
+ if type(lookups) ~= "table" then
+ lookups = { lookups }
+ end
+ local a = anchor_to_lookup[anchor]
+ if not a then
+ a = { }
+ anchor_to_lookup[anchor] = a
+ end
+ for l=1,#lookups do
+ local lookup = lookups[l]
+ local l = lookup_to_anchor[lookup]
+ if l then
+ l[anchor] = true
+ else
+ l = { [anchor] = true }
+ lookup_to_anchor[lookup] = l
+ end
+ a[lookup] = true
+ end
+ end
+ end
+end
+
+-- local function checklookups(data,missing,nofmissing)
+-- local resources = data.resources
+-- local unicodes = resources.unicodes
+-- local lookuptypes = resources.lookuptypes
+-- if not unicodes or not lookuptypes then
+-- return
+-- elseif nofmissing <= 0 then
+-- return
+-- end
+-- local descriptions = data.descriptions
+-- local private = fonts.constructors and fonts.constructors.privateoffset or 0xF0000 -- 0x10FFFF
+-- --
+-- local ns, nl = 0, 0
+
+-- local guess = { }
+-- -- helper
+-- local function check(gname,code,unicode)
+-- local description = descriptions[code]
+-- -- no need to add a self reference
+-- local variant = description.name
+-- if variant == gname then
+-- return
+-- end
+-- -- the variant already has a unicode (normally that results in a default tounicode to self)
+-- local unic = unicodes[variant]
+-- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
+-- -- no default mapping and therefore maybe no tounicode yet
+-- else
+-- return
+-- end
+-- -- the variant already has a tounicode
+-- if descriptions[code].unicode then
+-- return
+-- end
+-- -- add to the list
+-- local g = guess[variant]
+-- -- local r = overloads[unicode]
+-- -- if r then
+-- -- unicode = r.unicode
+-- -- end
+-- if g then
+-- g[gname] = unicode
+-- else
+-- guess[variant] = { [gname] = unicode }
+-- end
+-- end
+-- --
+-- for unicode, description in next, descriptions do
+-- local slookups = description.slookups
+-- if slookups then
+-- local gname = description.name
+-- for tag, data in next, slookups do
+-- local lookuptype = lookuptypes[tag]
+-- if lookuptype == "alternate" then
+-- for i=1,#data do
+-- check(gname,data[i],unicode)
+-- end
+-- elseif lookuptype == "substitution" then
+-- check(gname,data,unicode)
+-- end
+-- end
+-- end
+-- local mlookups = description.mlookups
+-- if mlookups then
+-- local gname = description.name
+-- for tag, list in next, mlookups do
+-- local lookuptype = lookuptypes[tag]
+-- if lookuptype == "alternate" then
+-- for i=1,#list do
+-- local data = list[i]
+-- for i=1,#data do
+-- check(gname,data[i],unicode)
+-- end
+-- end
+-- elseif lookuptype == "substitution" then
+-- for i=1,#list do
+-- check(gname,list[i],unicode)
+-- end
+-- end
+-- end
+-- end
+-- end
+-- -- resolve references
+-- local done = true
+-- while done do
+-- done = false
+-- for k, v in next, guess do
+-- if type(v) ~= "number" then
+-- for kk, vv in next, v do
+-- if vv == -1 or vv >= private or (vv >= 0xE000 and vv <= 0xF8FF) or vv == 0xFFFE or vv == 0xFFFF then
+-- local uu = guess[kk]
+-- if type(uu) == "number" then
+-- guess[k] = uu
+-- done = true
+-- end
+-- else
+-- guess[k] = vv
+-- done = true
+-- end
+-- end
+-- end
+-- end
+-- end
+-- -- wrap up
+-- local orphans = 0
+-- local guessed = 0
+-- for k, v in next, guess do
+-- if type(v) == "number" then
+-- descriptions[unicodes[k]].unicode = descriptions[v].unicode or v -- can also be a table
+-- guessed = guessed + 1
+-- else
+-- local t = nil
+-- local l = lower(k)
+-- local u = unicodes[l]
+-- if not u then
+-- orphans = orphans + 1
+-- elseif u == -1 or u >= private or (u >= 0xE000 and u <= 0xF8FF) or u == 0xFFFE or u == 0xFFFF then
+-- local unicode = descriptions[u].unicode
+-- if unicode then
+-- descriptions[unicodes[k]].unicode = unicode
+-- guessed = guessed + 1
+-- else
+-- orphans = orphans + 1
+-- end
+-- else
+-- orphans = orphans + 1
+-- end
+-- end
+-- end
+-- if trace_loading and orphans > 0 or guessed > 0 then
+-- report_otf("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans)
+-- end
+-- end
+
+actions["prepare tounicode"] = function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
+end
+
+local g_directions = {
+ gsub_contextchain = 1,
+ gpos_contextchain = 1,
+ -- gsub_context = 1,
+ -- gpos_context = 1,
+ gsub_reversecontextchain = -1,
+ gpos_reversecontextchain = -1,
+}
+-- The following is no longer needed as AAT has been ignored since the end of October 2013.
+--
+-- -- Research by Khaled Hosny has demonstrated that the font loader merges
+-- -- regular and AAT features and that these can interfere (especially because
+-- -- we dropped checking for valid features elsewhere. So, we just check for
+-- -- the special flag and drop the feature if such a tag is found.
+--
+-- local function supported(features)
+-- for i=1,#features do
+-- if features[i].ismac then
+-- return false
+-- end
+-- end
+-- return true
+-- end
+
+actions["reorganize subtables"] = function(data,filename,raw)
+ local resources = data.resources
+ local sequences = { }
+ local lookups = { }
+ local chainedfeatures = { }
+ resources.sequences = sequences
+ resources.lookups = lookups -- we also have lookups in data itself
+ for k=1,#otf.glists do
+ local what = otf.glists[k]
+ local dw = raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk = dw[k]
+ local features = gk.features
+ -- if not features or supported(features) then -- not always features !
+ local typ = gk.type
+ local chain = g_directions[typ] or 0
+ local subtables = gk.subtables
+ if subtables then
+ local t = { }
+ for s=1,#subtables do
+ t[s] = subtables[s].name
+ end
+ subtables = t
+ end
+ local flags, markclass = gk.flags, nil
+ if flags then
+ local t = { -- forcing false packs nicer
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass = flags.mark_class
+ if markclass then
+ markclass = resources.markclasses[markclass]
+ end
+ flags = t
+ end
+ --
+ local name = gk.name
+ --
+ if not name then
+ -- in fact an error
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ -- scripts, tag, ismac
+ local f = { }
+ local o = { }
+ for i=1,#features do
+ local df = features[i]
+ local tag = strip(lower(df.tag))
+ local ft = f[tag]
+ if not ft then
+ ft = { }
+ f[tag] = ft
+ o[#o+1] = tag
+ end
+ local dscripts = df.scripts
+ for i=1,#dscripts do
+ local d = dscripts[i]
+ local languages = d.langs
+ local script = strip(lower(d.script))
+ local fts = ft[script] if not fts then fts = {} ft[script] = fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))] = true
+ end
+ end
+ end
+ sequences[#sequences+1] = {
+ type = typ,
+ chain = chain,
+ flags = flags,
+ name = name,
+ subtables = subtables,
+ markclass = markclass,
+ features = f,
+ order = o,
+ }
+ else
+ lookups[name] = {
+ type = typ,
+ chain = chain,
+ flags = flags,
+ subtables = subtables,
+ markclass = markclass,
+ }
+ end
+ -- end
+ end
+ end
+ end
+end
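+
+-- A sketch of one resulting sequence entry (names and values hypothetical;
+-- flags and markclass stay nil when the lookup defines none):
+--
+--   {
+--       type      = "gsub_ligature",
+--       chain     = 0,
+--       flags     = { false, false, false, false },
+--       name      = "s_s_0",
+--       subtables = { "s_s_0_subtable" },
+--       features  = { liga = { latn = { dflt = true } } },
+--       order     = { "liga" },
+--   }
+--
+-- lookups without features end up in resources.lookups, hashed by name.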
+
+actions["prepare lookups"] = function(data,filename,raw)
+ local lookups = raw.lookups
+ if lookups then
+ data.lookups = lookups
+ end
+end
+
+-- The reverse handler does a bit of redundant splitting but it's seldom
+-- seen so we don't bother too much. We could store the replacement
+-- in the current list (value instead of true) but it makes other code
+-- uglier. Maybe some day.
+
+local function t_uncover(splitter,cache,covers)
+ local result = { }
+ for n=1,#covers do
+ local cover = covers[n]
+ local uncovered = cache[cover]
+ if not uncovered then
+ uncovered = lpegmatch(splitter,cover)
+ cache[cover] = uncovered
+ end
+ result[n] = uncovered
+ end
+ return result
+end
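+
+-- Sketch of the uncover helpers (coverage strings hypothetical): a coverage
+-- entry like "f f_i f_l" is split by the tounicode splitter into a list of
+-- unicodes; t_uncover does that for every entry of a covers array and caches
+-- the result per string, while s_uncover (below) handles a single string:
+--
+--   local lists = t_uncover(splitter,cache,{ "f f_i", "a b c" })
+--   -- lists[1] -> unicodes of f and f_i, lists[2] -> those of a, b and c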
+
+local function s_uncover(splitter,cache,cover)
+ if cover == "" then
+ return nil
+ else
+ local uncovered = cache[cover]
+ if not uncovered then
+ uncovered = lpegmatch(splitter,cover)
+ -- for i=1,#uncovered do
+ -- uncovered[i] = { [uncovered[i]] = true }
+ -- end
+ cache[cover] = uncovered
+ end
+ return { uncovered }
+ end
+end
+
+local function t_hashed(t,cache)
+ if t then
+ local ht = { }
+ for i=1,#t do
+ local ti = t[i]
+ local tih = cache[ti]
+ if not tih then
+ local tn = #ti
+ if tn == 1 then
+ tih = { [ti[1]] = true }
+ else
+ tih = { }
+ for i=1,tn do
+ tih[ti[i]] = true
+ end
+ end
+ cache[ti] = tih
+ end
+ ht[i] = tih
+ end
+ return ht
+ else
+ return nil
+ end
+end
+
+-- local s_hashed = t_hashed
+
+local function s_hashed(t,cache)
+ if t then
+ local tf = t[1]
+ local nf = #tf
+ if nf == 1 then
+ return { [tf[1]] = true }
+ else
+ local ht = { }
+ for i=1,nf do
+ ht[i] = { [tf[i]] = true }
+ end
+ return ht
+ end
+ else
+ return nil
+ end
+end
+
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover == "" then
+ return nil
+ else
+ -- we always have current as { } even in the case of one
+ local uncovered = cover[1]
+ local replaced = cache[replacements]
+ if not replaced then
+ replaced = lpegmatch(splitter,replacements)
+ cache[replacements] = replaced
+ end
+ local nu, nr = #uncovered, #replaced
+ local r = { }
+ if nu == nr then
+ for i=1,nu do
+ r[uncovered[i]] = replaced[i]
+ end
+ end
+ return r
+ end
+end
+
+actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0
+ -- we prefer the before lookups in a normal order
+ if data.lookups then
+ local helpers = data.helpers
+ local duplicates = data.resources.duplicates
+ local splitter = helpers.tounicodetable
+ local t_u_cache = { }
+ local s_u_cache = t_u_cache -- string keys
+ local t_h_cache = { }
+ local s_h_cache = t_h_cache -- table keys (so we could use one cache)
+ local r_u_cache = { } -- maybe shared
+ helpers.matchcache = t_h_cache -- so that we can add duplicates
+ --
+ for _, lookup in next, data.lookups do
+ local rules = lookup.rules
+ if rules then
+ local format = lookup.format
+ if format == "class" then
+ local before_class = lookup.before_class
+ if before_class then
+ before_class = t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class = lookup.current_class
+ if current_class then
+ current_class = t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class = lookup.after_class
+ if after_class then
+ after_class = t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule = rules[i]
+ local class = rule.class
+ local before = class.before
+ if before then
+ for i=1,#before do
+ before[i] = before_class[before[i]] or { }
+ end
+ rule.before = t_hashed(before,t_h_cache)
+ end
+ local current = class.current
+ local lookups = rule.lookups
+ if current then
+ for i=1,#current do
+ current[i] = current_class[current[i]] or { }
+ -- let's not be sparse
+ if lookups and not lookups[i] then
+ lookups[i] = "" -- (was: false) e.g. we can have two lookups and one replacement
+ end
+ -- end of fix
+ end
+ rule.current = t_hashed(current,t_h_cache)
+ end
+ local after = class.after
+ if after then
+ for i=1,#after do
+ after[i] = after_class[after[i]] or { }
+ end
+ rule.after = t_hashed(after,t_h_cache)
+ end
+ rule.class = nil
+ end
+ lookup.before_class = nil
+ lookup.current_class = nil
+ lookup.after_class = nil
+ lookup.format = "coverage"
+ elseif format == "coverage" then
+ for i=1,#rules do
+ local rule = rules[i]
+ local coverage = rule.coverage
+ if coverage then
+ local before = coverage.before
+ if before then
+ before = t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before = t_hashed(before,t_h_cache)
+ end
+ local current = coverage.current
+ if current then
+ current = t_uncover(splitter,t_u_cache,current)
+ -- let's not be sparse
+ local lookups = rule.lookups
+ if lookups then
+ for i=1,#current do
+ if not lookups[i] then
+ lookups[i] = "" -- fix sparse array
+ end
+ end
+ end
+ --
+ rule.current = t_hashed(current,t_h_cache)
+ end
+ local after = coverage.after
+ if after then
+ after = t_uncover(splitter,t_u_cache,after)
+ rule.after = t_hashed(after,t_h_cache)
+ end
+ rule.coverage = nil
+ end
+ end
+ elseif format == "reversecoverage" then -- special case, single substitution only
+ for i=1,#rules do
+ local rule = rules[i]
+ local reversecoverage = rule.reversecoverage
+ if reversecoverage then
+ local before = reversecoverage.before
+ if before then
+ before = t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before = t_hashed(before,t_h_cache)
+ end
+ local current = reversecoverage.current
+ if current then
+ current = t_uncover(splitter,t_u_cache,current)
+ rule.current = t_hashed(current,t_h_cache)
+ end
+ local after = reversecoverage.after
+ if after then
+ after = t_uncover(splitter,t_u_cache,after)
+ rule.after = t_hashed(after,t_h_cache)
+ end
+ local replacements = reversecoverage.replacements
+ if replacements then
+ rule.replacements = r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage = nil
+ end
+ end
+ elseif format == "glyphs" then
+                    -- I could store these more efficiently (as now we use nested tables for before,
+                    -- after and current) but this feature happens so seldom that I don't bother
+                    -- about it right now.
+ for i=1,#rules do
+ local rule = rules[i]
+ local glyphs = rule.glyphs
+ if glyphs then
+ local fore = glyphs.fore
+ if fore and fore ~= "" then
+ fore = s_uncover(splitter,s_u_cache,fore)
+ rule.after = s_hashed(fore,s_h_cache)
+ end
+ local back = glyphs.back
+ if back then
+ back = s_uncover(splitter,s_u_cache,back)
+ rule.before = s_hashed(back,s_h_cache)
+ end
+ local names = glyphs.names
+ if names then
+ names = s_uncover(splitter,s_u_cache,names)
+ rule.current = s_hashed(names,s_h_cache)
+ end
+ rule.glyphs = nil
+ local lookups = rule.lookups
+ if lookups then
+ for i=1,#names do
+ if not lookups[i] then
+ lookups[i] = "" -- fix sparse array
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+actions["expand lookups"] = function(data,filename,raw) -- we could check for "" and n == 0
+ if data.lookups then
+ local cache = data.helpers.matchcache
+ if cache then
+ local duplicates = data.resources.duplicates
+ for key, hash in next, cache do
+ local done = nil
+ for key in next, hash do
+ local unicode = duplicates[key]
+ if not unicode then
+ -- no duplicate
+ elseif type(unicode) == "table" then
+ -- multiple duplicates
+ for i=1,#unicode do
+ local u = unicode[i]
+ if hash[u] then
+ -- already in set
+ elseif done then
+ done[u] = key
+ else
+ done = { [u] = key }
+ end
+ end
+ else
+ -- one duplicate
+ if hash[unicode] then
+ -- already in set
+ elseif done then
+ done[unicode] = key
+ else
+ done = { [unicode] = key }
+ end
+ end
+ end
+ if done then
+ for u in next, done do
+ hash[u] = true
+ end
+ end
+ end
+ end
+ end
+end
+
+local function check_variants(unicode,the_variants,splitter,unicodes)
+ local variants = the_variants.variants
+ if variants then -- use splitter
+ local glyphs = lpegmatch(splitter,variants)
+ local done = { [unicode] = true }
+ local n = 0
+ for i=1,#glyphs do
+ local g = glyphs[i]
+ if done[g] then
+ if i > 1 then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ end
+ else
+ if n == 0 then
+ n = 1
+ variants = { g }
+ else
+ n = n + 1
+ variants[n] = g
+ end
+ done[g] = true
+ end
+ end
+ if n == 0 then
+ variants = nil
+ end
+ end
+ local parts = the_variants.parts
+ if parts then
+ local p = #parts
+ if p > 0 then
+ for i=1,p do
+ local pi = parts[i]
+ pi.glyph = unicodes[pi.component] or 0
+ pi.component = nil
+ end
+ else
+ parts = nil
+ end
+ end
+ local italic = the_variants.italic
+ if italic and italic == 0 then
+ italic = nil
+ end
+ return variants, parts, italic
+end
+
+actions["analyze math"] = function(data,filename,raw)
+ if raw.math then
+ data.metadata.math = raw.math
+ local unicodes = data.resources.unicodes
+ local splitter = data.helpers.tounicodetable
+ for unicode, description in next, data.descriptions do
+ local glyph = description.glyph
+ local mathkerns = glyph.mathkern -- singular
+ local hvariants = glyph.horiz_variants
+ local vvariants = glyph.vert_variants
+ local accent = glyph.top_accent
+ local italic = glyph.italic_correction
+ if mathkerns or hvariants or vvariants or accent or italic then
+ local math = { }
+ if accent then
+ math.accent = accent
+ end
+ if mathkerns then
+ for k, v in next, mathkerns do
+ if not next(v) then
+ mathkerns[k] = nil
+ else
+ for k, v in next, v do
+ if v == 0 then
+ k[v] = nil -- height / kern can be zero
+ end
+ end
+ end
+ end
+ math.kerns = mathkerns
+ end
+ if hvariants then
+ math.hvariants, math.hparts, math.hitalic = check_variants(unicode,hvariants,splitter,unicodes)
+ end
+ if vvariants then
+ math.vvariants, math.vparts, math.vitalic = check_variants(unicode,vvariants,splitter,unicodes)
+ end
+ if italic and italic ~= 0 then
+ math.italic = italic
+ end
+ description.math = math
+ end
+ end
+ end
+end
+
+actions["reorganize glyph kerns"] = function(data,filename,raw)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ for unicode, description in next, descriptions do
+ local kerns = description.glyph.kerns
+ if kerns then
+ local newkerns = { }
+ for k, kern in next, kerns do
+ local name = kern.char
+ local offset = kern.off
+ local lookup = kern.lookup
+ if name and offset and lookup then
+ local unicode = unicodes[name]
+ if unicode then
+ if type(lookup) == "table" then
+ for l=1,#lookup do
+ local lookup = lookup[l]
+ local lookupkerns = newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode] = offset
+ else
+ newkerns[lookup] = { [unicode] = offset }
+ end
+ end
+ else
+ local lookupkerns = newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode] = offset
+ else
+ newkerns[lookup] = { [unicode] = offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns = newkerns
+ end
+ end
+end
+
+actions["merge kern classes"] = function(data,filename,raw)
+ local gposlist = raw.gpos
+ if gposlist then
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ local splitter = data.helpers.tounicodetable
+ local ignored = 0
+ local blocked = 0
+ for gp=1,#gposlist do
+ local gpos = gposlist[gp]
+ local subtables = gpos.subtables
+ if subtables then
+ local first_done = { } -- could become an option so that we can deal with buggy fonts that don't get fixed
+ local split = { } -- saves time .. although probably not that much any more in the fixed luatex kernclass table
+ for s=1,#subtables do
+ local subtable = subtables[s]
+ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
+ local lookup = subtable.lookup or subtable.name
+ if kernclass then -- the next one is quite slow
+                     -- as far as I can see the kernclass is a table with one entry and offsets
+                     -- have no [1] so we could remove one level (kernclass) and start offsets
+                     -- at 1 but we're too far down the road now to fix that
+ if #kernclass > 0 then
+ kernclass = kernclass[1]
+ lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts = kernclass.firsts
+ local seconds = kernclass.seconds
+ local offsets = kernclass.offsets
+ -- if offsets[1] == nil then
+ -- offsets[1] = "" -- defaults ?
+ -- end
+ for n, s in next, firsts do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds = 0
+ for n, s in next, seconds do
+ if n > maxseconds then
+ maxseconds = n
+ end
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do -- maxfirsts ?
+ local fv = firsts[fk]
+ local splt = split[fv]
+ if splt then
+ local extrakerns = { }
+ local baseoffset = (fk-1) * maxseconds
+ -- for sk, sv in next, seconds do
+ for sk=2,maxseconds do
+ local sv = seconds[sk]
+ if sv then
+ local splt = split[sv]
+ if splt then -- redundant test
+ local offset = offsets[baseoffset + sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]] = offset
+ end
+ end
+ end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode = splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked = blocked + 1
+ else
+ first_done[first_unicode] = true
+ local description = descriptions[first_unicode]
+ if description then
+ local kerns = description.kerns
+ if not kerns then
+ kerns = { } -- unicode indexed !
+ description.kerns = kerns
+ end
+ local lookupkerns = kerns[lookup]
+ if not lookupkerns then
+ lookupkerns = { }
+ kerns[lookup] = lookupkerns
+ end
+ if overloadkerns then
+ for second_unicode, kern in next, extrakerns do
+ lookupkerns[second_unicode] = kern
+ end
+ else
+ for second_unicode, kern in next, extrakerns do
+ local k = lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode] = kern
+ elseif k ~= kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored = ignored + 1
+ end
+ end
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U", first_unicode)
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass = { }
+ end
+ end
+ end
+ end
+ if ignored > 0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked > 0 then
+ report_otf("%s successive kerns blocked",blocked)
+ end
+ end
+end
+
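+-- A small sketch, for illustration only, of how a kern class resolves to per-pair
+-- kerns above (all values are made up): firsts/seconds hold space-separated class
+-- strings and offsets is a flat matrix indexed as (firstclass-1)*maxseconds+secondclass:
+--
+-- local firsts     = { [1] = "A V" }            -- class 1 of first glyphs
+-- local seconds    = { [2] = "A", [3] = "V" }   -- classes 2 and 3 of second glyphs
+-- local offsets    = { [2] = -80, [3] = -60 }   -- row for first class 1
+-- local maxseconds = 3
+-- local function classkern(fk,sk)
+--     return offsets[(fk-1)*maxseconds + sk] or 0
+-- end
+-- -- classkern(1,2) == -80 (class "A V" before class "A")
+-- -- classkern(1,3) == -60 (class "A V" before class "V")
+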
+actions["check glyphs"] = function(data,filename,raw)
+ for unicode, description in next, data.descriptions do
+ description.glyph = nil
+ end
+end
+
+-- future versions will remove _
+
+local valid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1)
+
+local function valid_ps_name(str)
+ return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false
+end
+
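+-- For illustration only, a standalone version of the check above (it assumes lpeg,
+-- which is built into luatex): a postscript name must be plain ascii, free of
+-- delimiters and whitespace, and shorter than 64 characters:
+--
+-- local lpeg = lpeg or require("lpeg")
+-- local P, R, S, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.match
+-- local valid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1)
+-- local function valid_ps_name(str)
+--     return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false
+-- end
+-- -- valid_ps_name("LatinModernRoman-Regular") --> true
+-- -- valid_ps_name("Latin Modern Roman")       --> false (contains spaces)
+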
+actions["check metadata"] = function(data,filename,raw)
+ local metadata = data.metadata
+ for _, k in next, mainfields do
+ if valid_fields[k] then
+ local v = raw[k]
+ if not metadata[k] then
+ metadata[k] = v
+ end
+ end
+ end
+ -- metadata.pfminfo = raw.pfminfo -- not already done?
+ local ttftables = metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data = "deleted"
+ end
+ end
+ --
+ local names = raw.names
+ --
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ -- the ff library does a bit too much (and wrong) checking ... so we need to catch this
+ -- at least for now
+ local function valid(what)
+ if names then
+ for i=1,#names do
+ local list = names[i]
+ local names = list.names
+ if names then
+ local name = names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname = metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname = valid(what)
+ if not newname then
+ newname = formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning = formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1] = warning
+ report_otf(warning)
+ metadata[what] = newname
+ end
+ end
+ check("fontname")
+ check("fullname")
+ end
+ --
+ if names then
+ local psname = metadata.psname
+ if not psname or psname == "" then
+ for i=1,#names do
+ local name = names[i]
+                -- Currently we use the same restricted search as in the new context (specific) font loader,
+                -- but we might add more lang checks (it worked ok in the new loader, so now we're in sync).
+                -- This check is also here because there are (especially) cjk fonts out there with psnames
+                -- different from fontnames (which gives a bad lookup in the backend).
+ if lower(name.lang) == "english (us)" then
+ local specification = name.names
+ if specification then
+ local postscriptname = specification.postscriptname
+ if postscriptname then
+ psname = postscriptname
+ end
+ end
+ end
+ break
+ end
+ end
+ if psname ~= metadata.fontname then
+ report_otf("fontname %a, fullname %a, psname %a",metadata.fontname,metadata.fullname,psname)
+ end
+ metadata.psname = psname
+ end
+ --
+end
+
+actions["cleanup tables"] = function(data,filename,raw)
+ local duplicates = data.resources.duplicates
+ if duplicates then
+ for k, v in next, duplicates do
+ if #v == 1 then
+ duplicates[k] = v[1]
+ end
+ end
+ end
+ data.resources.indices = nil -- not needed
+ data.resources.unicodes = nil -- delayed
+ data.helpers = nil -- tricky as we have no unicodes any more
+end
+
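+-- An illustrative sketch (unicodes are made up): single-entry duplicate lists are
+-- flattened to a scalar so that later consumers (see otftotfm further down) can
+-- handle both the scalar and the list form cheaply:
+--
+-- local duplicates = { [0x0041] = { 0xE041 }, [0x0020] = { 0x00A0, 0x2007 } }
+-- for k, v in next, duplicates do
+--     if #v == 1 then
+--         duplicates[k] = v[1]
+--     end
+-- end
+-- -- duplicates[0x0041] == 0xE041, duplicates[0x0020] stays a list of two entries
+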
+-- kern: ttf has a table with kerns
+--
+-- Weird, as maxfirst and maxseconds can have holes: first seems to be indexed, but
+-- seconds can start at 2 .. this needs to be fixed, as getn as well as # are sort of
+-- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
+-- anyway).
+
+-- we can share { } as it is never set
+
+-- ligatures have an extra specification.char entry that we don't use
+
+-- mlookups only with pairs and ligatures
+
+actions["reorganize glyph lookups"] = function(data,filename,raw)
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ local descriptions = data.descriptions
+ local splitter = data.helpers.tounicodelist
+
+ local lookuptypes = resources.lookuptypes
+
+ for unicode, description in next, descriptions do
+ local lookups = description.glyph.lookups
+ if lookups then
+ for tag, lookuplist in next, lookups do
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local specification = lookup.specification
+ local lookuptype = lookup.type
+ local lt = lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag] = lookuptype
+ elseif lt ~= lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype == "ligature" then
+ lookuplist[l] = { lpegmatch(splitter,specification.components) }
+ elseif lookuptype == "alternate" then
+ lookuplist[l] = { lpegmatch(splitter,specification.components) }
+ elseif lookuptype == "substitution" then
+ lookuplist[l] = unicodes[specification.variant]
+ elseif lookuptype == "multiple" then
+ lookuplist[l] = { lpegmatch(splitter,specification.components) }
+ elseif lookuptype == "position" then
+ lookuplist[l] = {
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype == "pair" then
+ local one = specification.offsets[1]
+ local two = specification.offsets[2]
+ local paired = unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
+ else
+ lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
+ else
+ lookuplist[l] = { paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups, mlookups
+ for tag, lookuplist in next, lookups do
+ if #lookuplist == 1 then
+ if slookups then
+ slookups[tag] = lookuplist[1]
+ else
+ slookups = { [tag] = lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag] = lookuplist
+ else
+ mlookups = { [tag] = lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups = slookups
+ end
+ if mlookups then
+ description.mlookups = mlookups
+ end
+ -- description.lookups = nil
+ end
+ end
+end
+
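+-- For illustration only, the per-glyph shapes that the loop above produces and that
+-- later code relies on (tags and unicodes are made up): lists with a single entry
+-- end up in slookups, the rest in mlookups, and ligatures, alternates and multiples
+-- become (lists of) unicode arrays:
+--
+-- local description = { }
+-- description.slookups = {
+--     ["ss01_s"] = 0xE041,                               -- substitution: one replacement glyph
+-- }
+-- description.mlookups = {
+--     ["liga_s"] = { { 0x66, 0x66 }, { 0x66, 0x69 } },   -- two ligatures: f+f and f+i
+-- }
+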
+local zero = { 0, 0 }
+
+actions["reorganize glyph anchors"] = function(data,filename,raw)
+ local descriptions = data.descriptions
+ for unicode, description in next, descriptions do
+ local anchors = description.glyph.anchors
+ if anchors then
+ for class, data in next, anchors do
+ if class == "baselig" then
+ for tag, specification in next, data do
+ -- for i=1,#specification do
+ -- local si = specification[i]
+ -- specification[i] = { si.x or 0, si.y or 0 }
+ -- end
+ -- can be sparse so we need to fill the holes
+ local n = 0
+ for k, v in next, specification do
+ if k > n then
+ n = k
+ end
+ local x, y = v.x, v.y
+ if x or y then
+ specification[k] = { x or 0, y or 0 }
+ else
+ specification[k] = zero
+ end
+ end
+ local t = { }
+ for i=1,n do
+ t[i] = specification[i] or zero
+ end
+ data[tag] = t -- so # is okay (nicer for packer)
+ end
+ else
+ for tag, specification in next, data do
+ local x, y = specification.x, specification.y
+ if x or y then
+ data[tag] = { x or 0, y or 0 }
+ else
+ data[tag] = zero
+ end
+ end
+ end
+ end
+ description.anchors = anchors
+ end
+ end
+end
+
+local bogusname = (P("uni") + P("u")) * R("AF","09")^4
+ + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
+local uselessname = (1-bogusname)^0 * bogusname
+
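+-- For illustration only (assumes lpeg, as built into luatex): names that merely
+-- encode a code point or glyph index match the pattern above and carry no real
+-- information, so they can be purged; proper glyph names do not match:
+--
+-- local lpeg = lpeg or require("lpeg")
+-- local P, R, S, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.match
+-- local bogusname   = (P("uni") + P("u")) * R("AF","09")^4
+--                   + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
+-- local uselessname = (1-bogusname)^0 * bogusname
+-- -- lpegmatch(uselessname,"uni0041")  --> 8   (bogus, gets purged)
+-- -- lpegmatch(uselessname,"glyph123") --> 9   (bogus, gets purged)
+-- -- lpegmatch(uselessname,"a.sc")     --> nil (a real name, kept)
+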
+actions["purge names"] = function(data,filename,raw) -- not used yet
+ if purge_names then
+ local n = 0
+ for u, d in next, data.descriptions do
+ if lpegmatch(uselessname,d.name) then
+ n = n + 1
+ d.name = nil
+ end
+ -- d.comment = nil
+ end
+ if n > 0 then
+ report_otf("%s bogus names removed",n)
+ end
+ end
+end
+
+actions["compact lookups"] = function(data,filename,raw)
+ if not compact_lookups then
+ report_otf("not compacting")
+ return
+ end
+ -- create keyhash
+ local last = 0
+ local tags = table.setmetatableindex({ },
+ function(t,k)
+ last = last + 1
+ t[k] = last
+ return last
+ end
+ )
+ --
+ local descriptions = data.descriptions
+ local resources = data.resources
+ --
+ for u, d in next, descriptions do
+ --
+ -- -- we can also compact anchors and cursives (basechar basemark baselig mark)
+ --
+ local slookups = d.slookups
+ if type(slookups) == "table" then
+ local s = { }
+ for k, v in next, slookups do
+ s[tags[k]] = v
+ end
+ d.slookups = s
+ end
+ --
+ local mlookups = d.mlookups
+ if type(mlookups) == "table" then
+ local m = { }
+ for k, v in next, mlookups do
+ m[tags[k]] = v
+ end
+ d.mlookups = m
+ end
+ --
+ local kerns = d.kerns
+ if type(kerns) == "table" then
+ local t = { }
+ for k, v in next, kerns do
+ t[tags[k]] = v
+ end
+ d.kerns = t
+ end
+ end
+ --
+ local lookups = data.lookups
+ if lookups then
+ local l = { }
+ for k, v in next, lookups do
+ local rules = v.rules
+ if rules then
+ for i=1,#rules do
+ local l = rules[i].lookups
+ if type(l) == "table" then
+ for i=1,#l do
+ l[i] = tags[l[i]]
+ end
+ end
+ end
+ end
+ l[tags[k]] = v
+ end
+ data.lookups = l
+ end
+ --
+ local lookups = resources.lookups
+ if lookups then
+ local l = { }
+ for k, v in next, lookups do
+ local s = v.subtables
+ if type(s) == "table" then
+ for i=1,#s do
+ s[i] = tags[s[i]]
+ end
+ end
+ l[tags[k]] = v
+ end
+ resources.lookups = l
+ end
+ --
+ local sequences = resources.sequences
+ if sequences then
+ for i=1,#sequences do
+ local s = sequences[i]
+ local n = s.name
+ if n then
+ s.name = tags[n]
+ end
+ local t = s.subtables
+ if type(t) == "table" then
+ for i=1,#t do
+ t[i] = tags[t[i]]
+ end
+ end
+ end
+ end
+ --
+ local lookuptypes = resources.lookuptypes
+ if lookuptypes then
+ local l = { }
+ for k, v in next, lookuptypes do
+ l[tags[k]] = v
+ end
+ resources.lookuptypes = l
+ end
+ --
+ local anchor_to_lookup = resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor, lookups in next, anchor_to_lookup do
+ local l = { }
+ for lookup, value in next, lookups do
+ l[tags[lookup]] = value
+ end
+ anchor_to_lookup[anchor] = l
+ end
+ end
+ --
+ local lookup_to_anchor = resources.lookup_to_anchor
+ if lookup_to_anchor then
+ local l = { }
+ for lookup, value in next, lookup_to_anchor do
+ l[tags[lookup]] = value
+ end
+ resources.lookup_to_anchor = l
+ end
+ --
+ tags = table.swapped(tags)
+ --
+ report_otf("%s lookup tags compacted",#tags)
+ --
+ resources.lookuptags = tags
+end
+
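+-- For illustration only: the compacting above relies on an auto-numbering hash
+-- (table.setmetatableindex is a context helper, plain setmetatable does the same
+-- job here), so every lookup tag is replaced by a small integer on first access:
+--
+-- local last = 0
+-- local tags = setmetatable({ }, {
+--     __index = function(t,k)
+--         last = last + 1
+--         t[k] = last
+--         return last
+--     end
+-- })
+-- local a, b, c = tags["s_s_0"], tags["s_l_1"], tags["s_s_0"]
+-- -- a == 1, b == 2, c == 1 : equal tags map to the same small number
+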
+-- modes: node, base, none
+
+function otf.setfeatures(tfmdata,features)
+ local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return { } -- will become false
+ end
+end
+
+-- the first version made a top/mid/not extensible table, now we just
+-- pass on the variants data and deal with it in the tfm scaler (there
+-- is no longer an extensible table anyway)
+--
+-- we cannot share descriptions as virtual fonts might extend them (ok,
+-- we could use a cache with a hash)
+--
+-- we already assign an empty table to characters as we can add for
+-- instance protruding info and loop over characters; one is not supposed
+-- to change descriptions and if one does so one should make a copy!
+
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata = data.metadata
+ local warnings = data.warnings
+ local resources = data.resources
+ local properties = derivetable(data.properties)
+ local descriptions = derivetable(data.descriptions)
+ local goodies = derivetable(data.goodies)
+ local characters = { }
+ local parameters = { }
+ local mathparameters = { }
+ --
+ local pfminfo = metadata.pfminfo or { }
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ -- local mode = data.mode or "base"
+ local spaceunits = 500
+ local spacer = "space"
+ local designsize = metadata.designsize or metadata.design_size or 100
+ local minsize = metadata.minsize or metadata.design_range_bottom or designsize
+ local maxsize = metadata.maxsize or metadata.design_range_top or designsize
+ local mathspecs = metadata.math
+ --
+ if designsize == 0 then
+ designsize = 100
+ minsize = 100
+ maxsize = 100
+ end
+ if mathspecs then
+ for name, value in next, mathspecs do
+ mathparameters[name] = value
+ end
+ end
+ for unicode, _ in next, data.descriptions do -- use parent table
+ characters[unicode] = { }
+ end
+ if mathspecs then
+ -- we could move this to the scaler but not that much is saved
+ -- and this is cleaner
+ for unicode, character in next, characters do
+ local d = descriptions[unicode]
+ local m = d.math
+ if m then
+ -- watch out: luatex uses horiz_variants for the parts
+ --
+ local italic = m.italic
+ local vitalic = m.vitalic
+ --
+ local variants = m.hvariants
+ local parts = m.hparts
+ -- local done = { [unicode] = true }
+ if variants then
+ local c = character
+ for i=1,#variants do
+ local un = variants[i]
+ -- if done[un] then
+ -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
+ -- else
+ c.next = un
+ c = characters[un]
+ -- done[un] = true
+ -- end
+ end -- c is now last in chain
+ c.horiz_variants = parts
+ elseif parts then
+ character.horiz_variants = parts
+ italic = m.hitalic
+ end
+ --
+ local variants = m.vvariants
+ local parts = m.vparts
+ -- local done = { [unicode] = true }
+ if variants then
+ local c = character
+ for i=1,#variants do
+ local un = variants[i]
+ -- if done[un] then
+ -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
+ -- else
+ c.next = un
+ c = characters[un]
+ -- done[un] = true
+ -- end
+ end -- c is now last in chain
+ c.vert_variants = parts
+ elseif parts then
+ character.vert_variants = parts
+ end
+ --
+ if italic and italic ~= 0 then
+ character.italic = italic -- overload
+ end
+ if vitalic and vitalic ~= 0 then
+ character.vert_italic = vitalic
+ end
+ --
+ local accent = m.accent
+ if accent then
+ character.accent = accent
+ end
+ --
+ local kerns = m.kerns
+ if kerns then
+ character.mathkerns = kerns
+ end
+ end
+ end
+ end
+ -- end math
+ -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
+ local filename = constructors.checkedfilename(resources)
+ local fontname = metadata.fontname
+ local fullname = metadata.fullname or fontname
+ local psname = metadata.psname or fontname or fullname
+ local units = metadata.units or metadata.units_per_em or 1000
+ --
+ if units == 0 then -- catch bugs in fonts
+ units = 1000 -- maybe 2000 when ttf
+ metadata.units = 1000
+ report_otf("changing %a units to %a",0,units)
+ end
+ --
+ local monospaced = metadata.monospaced or metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
+ local charwidth = pfminfo.avgwidth -- or unset
+ local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
+-- charwidth = charwidth * units/1000
+-- charxheight = charxheight * units/1000
+ local italicangle = metadata.italicangle
+ properties.monospaced = monospaced
+ parameters.italicangle = italicangle
+ parameters.charwidth = charwidth
+ parameters.charxheight = charxheight
+ --
+ local space = 0x0020
+ local emdash = 0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits, spacer = descriptions[space].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width, "emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits, spacer = descriptions[space].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ end
+ spaceunits = tonumber(spaceunits) or 500 -- brrr
+ --
+ parameters.slant = 0
+ parameters.space = spaceunits -- 3.333 (cmr10)
+ parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
+ parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
+ parameters.x_height = 2*units/5 -- 400
+ parameters.quad = units -- 1000
+ if spaceunits < 2*units/5 then
+ -- todo: warning
+ end
+ if italicangle and italicangle ~= 0 then
+ parameters.italicangle = italicangle
+ parameters.italicfactor = math.cos(math.rad(90+italicangle))
+ parameters.slant = - math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch = 0
+ parameters.space_shrink = 0
+ elseif syncspace then --
+ parameters.space_stretch = spaceunits/2
+ parameters.space_shrink = spaceunits/3
+ end
+ parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
+ if charxheight then
+ parameters.x_height = charxheight
+ else
+ local x = 0x0078
+ if x then
+ local x = descriptions[x]
+ if x then
+ parameters.x_height = x.height
+ end
+ end
+ end
+ --
+ parameters.designsize = (designsize/10)*65536
+ parameters.minsize = (minsize /10)*65536
+ parameters.maxsize = (maxsize /10)*65536
+ parameters.ascender = abs(metadata.ascender or metadata.ascent or 0)
+ parameters.descender = abs(metadata.descender or metadata.descent or 0)
+ parameters.units = units
+ --
+ properties.space = spacer
+ properties.encodingbytes = 2
+ properties.format = data.format or otf_format(filename) or formats.otf
+ properties.noglyphnames = true
+ properties.filename = filename
+ properties.fontname = fontname
+ properties.fullname = fullname
+ properties.psname = psname
+ properties.name = filename or fullname
+ --
+ -- properties.name = specification.name
+ -- properties.sub = specification.sub
+ --
+ if warnings and #warnings > 0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
+ return {
+ characters = characters,
+ descriptions = descriptions,
+ parameters = parameters,
+ mathparameters = mathparameters,
+ resources = resources,
+ properties = properties,
+ goodies = goodies,
+ warnings = warnings,
+ }
+ end
+end
+
+local function otftotfm(specification)
+ local cache_id = specification.hash
+ local tfmdata = containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name = specification.name
+ local sub = specification.sub
+ local filename = specification.filename
+ -- local format = specification.format
+ local features = specification.features.normal
+ local rawdata = otf.load(filename,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ local descriptions = rawdata.descriptions
+ local duplicates = rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates, nofduplicated = 0, 0
+ for parent, list in next, duplicates do
+ if type(list) == "table" then
+ local n = #list
+ for i=1,n do
+ local unicode = list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode] = descriptions[parent] -- or copy
+ nofduplicated = nofduplicated + 1
+ end
+ end
+ nofduplicates = nofduplicates + n
+ else
+ if not descriptions[list] then
+ descriptions[list] = descriptions[parent] -- or copy
+ nofduplicated = nofduplicated + 1
+ end
+ nofduplicates = nofduplicates + 1
+ end
+ end
+ if trace_otf and nofduplicated ~= nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
+ rawdata.lookuphash = { }
+ tfmdata = copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ -- at this moment no characters are assigned yet, only empty slots
+ local features = constructors.checkedfeatures("otf",features)
+ local shared = tfmdata.shared
+ if not shared then
+ shared = { }
+ tfmdata.shared = shared
+ end
+ shared.rawdata = rawdata
+ -- shared.features = features -- default
+ shared.dynamics = { }
+ -- shared.processes = { }
+ tfmdata.changed = { }
+ shared.features = features
+ shared.processes = otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+
+local function read_from_otf(specification)
+ local tfmdata = otftotfm(specification)
+ if tfmdata then
+ -- this late ? .. needs checking
+ tfmdata.properties.name = specification.name
+ tfmdata.properties.sub = specification.sub
+ --
+ tfmdata = constructors.scale(tfmdata,specification)
+ local allfeatures = tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification) -- only otf?
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
+
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata = tfmdata.shared.rawdata.metadata.math
+ local mathsize = tonumber(mathsize)
+ if mathdata then -- we cannot use mathparameters as luatex will complain
+ local parameters = tfmdata.parameters
+ parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize = mathsize
+ end
+end
+
+registerotffeature {
+ name = "mathsize",
+ description = "apply mathsize specified in the font",
+ initializers = {
+ base = checkmathsize,
+ node = checkmathsize,
+ }
+}
+
+-- helpers
+
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences = rawdata.resources.sequences
+ if sequences then
+ local featuremap, featurelist = { }, { }
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local features = sequence.features
+ features = features and features[kind]
+ features = features and (features[script] or features[default] or features[wildcard])
+ features = features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables = sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss = subtables[s]
+                        if not featuremap[ss] then
+ featuremap[ss] = true
+ featurelist[#featurelist+1] = ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist > 0 then
+ return featuremap, featurelist
+ end
+ end
+ return nil, nil
+end
+
+-- readers (a bit messy; this is forced, so I might redo that bit: foo.ttf FOO.ttf foo.TTF FOO.TTF)
+
+local function check_otf(forced,specification,suffix)
+ local name = specification.name
+ if forced then
+ name = specification.forcedname -- messy
+ end
+ local fullname = findbinfile(name,suffix) or ""
+ if fullname == "" then
+ fullname = fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname ~= "" and not fonts.names.ignoredfile(fullname) then
+ specification.filename = fullname
+ return read_from_otf(specification)
+ end
+end
+
+local function opentypereader(specification,suffix)
+ local forced = specification.forced or ""
+ if formats[forced] then
+ return check_otf(true,specification,forced)
+ else
+ return check_otf(false,specification,suffix)
+ end
+end
+
+readers.opentype = opentypereader -- kind of useless and obsolete
+
+function readers.otf (specification) return opentypereader(specification,"otf") end
+function readers.ttf (specification) return opentypereader(specification,"ttf") end
+function readers.ttc (specification) return opentypereader(specification,"ttf") end
+function readers.dfont(specification) return opentypereader(specification,"ttf") end
+
+-- this will be overloaded
+
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties = tfmdata.properties
+ return properties.script or "dflt", properties.language or "dflt"
+end
+
+-- a little bit of abstraction
+
+local function justset(coverage,unicode,replacement)
+ coverage[unicode] = replacement
+end
+
+otf.coverup = {
+ stepkey = "subtables",
+ actions = {
+ substitution = justset,
+ alternate = justset,
+ multiple = justset,
+ ligature = justset,
+ kern = justset,
+ },
+ register = function(coverage,lookuptype,format,feature,n,descriptions,resources)
+ local name = formatters["ctx_%s_%s"](feature,n)
+ if lookuptype == "kern" then
+ resources.lookuptypes[name] = "position"
+ else
+ resources.lookuptypes[name] = lookuptype
+ end
+ for u, c in next, coverage do
+ local description = descriptions[u]
+ local slookups = description.slookups
+ if slookups then
+ slookups[name] = c
+ else
+ description.slookups = { [name] = c }
+ end
+-- inspect(feature,description)
+ end
+ return name
+ end
+}
+
+-- moved from font-oth.lua
+
+local function getgsub(tfmdata,k,kind)
+ local description = tfmdata.descriptions[k]
+ if description then
+ local slookups = description.slookups -- we assume only slookups (we can always extend)
+ if slookups then
+ local shared = tfmdata.shared
+ local rawdata = shared and shared.rawdata
+ if rawdata then
+ local lookuptypes = rawdata.resources.lookuptypes
+ if lookuptypes then
+ local properties = tfmdata.properties
+ -- we could cache these
+ local validlookups, lookuplist = otf.collectlookups(rawdata,kind,properties.script,properties.language)
+ if validlookups then
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local found = slookups[lookup]
+ if found then
+ return found, lookuptypes[lookup]
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+otf.getgsub = getgsub -- returns value, gsub_kind
+
+function otf.getsubstitution(tfmdata,k,kind,value)
+ local found, kind = getgsub(tfmdata,k,kind)
+ if not found then
+ --
+ elseif kind == "substitution" then
+ return found
+ elseif kind == "alternate" then
+ local choice = tonumber(value) or 1 -- no random here (yet)
+ return found[choice] or found[1] or k
+ end
+ return k
+end
+
+otf.getalternate = otf.getsubstitution
+
+function otf.getmultiple(tfmdata,k,kind)
+ local found, kind = getgsub(tfmdata,k,kind)
+ if found and kind == "multiple" then
+ return found
+ end
+ return { k }
+end
+
+function otf.getkern(tfmdata,left,right,kind)
+ local kerns = getgsub(tfmdata,left,kind or "kern",true) -- for now we use getsub
+ if kerns then
+ local found = kerns[right]
+ local kind = type(found)
+ if kind == "table" then
+ found = found[1][3] -- can be more clever
+ elseif kind ~= "number" then
+ found = false
+ end
+ if found then
+ return found * tfmdata.parameters.factor
+ end
+ end
+ return 0
+end
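+
+-- For illustration only, a usage sketch of the helpers above; it assumes tfmdata is
+-- the data of an already defined font (here fetched via the identifier hash used by
+-- the callbacks) and the feature tags are just examples of what a font may provide:
+--
+-- local tfmdata = fonts.hashes.identifiers[font.current()]
+-- local small   = otf.getsubstitution(tfmdata,0x0061,"smcp")   -- a -> a.sc, if present
+-- local second  = otf.getalternate   (tfmdata,0x0061,"salt",2) -- second stylistic alternate
+-- local parts   = otf.getmultiple    (tfmdata,0xFB01,"ccmp")   -- possibly { 0x66, 0x69 }
+-- local kern    = otf.getkern        (tfmdata,0x0041,0x0056)   -- A/V kern in scaled points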
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-oti.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-oti.lua
new file mode 100644
index 00000000000..06c2a42fac2
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-oti.lua
@@ -0,0 +1,91 @@
+if not modules then modules = { } end modules ['font-oti'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local lower = string.lower
+
+local fonts = fonts
+local constructors = fonts.constructors
+
+local otf = constructors.newhandler("otf")
+local otffeatures = constructors.newfeatures("otf")
+local otftables = otf.tables
+local registerotffeature = otffeatures.register
+
+local allocate = utilities.storage.allocate
+
+registerotffeature {
+ name = "features",
+ description = "initialization of feature handler",
+ default = true,
+}
+
+-- these are later hooked into node and base initializers
+
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode = lower(value)
+ end
+end
+
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue = lower(value)
+ local languages = otftables and otftables.languages
+ local properties = tfmdata.properties
+ if not languages then
+ properties.language = cleanvalue
+ elseif languages[value] then
+ properties.language = cleanvalue
+ else
+ properties.language = "dflt"
+ end
+ end
+end
+
+local function setscript(tfmdata,value)
+ if value then
+ local cleanvalue = lower(value)
+ local scripts = otftables and otftables.scripts
+ local properties = tfmdata.properties
+ if not scripts then
+ properties.script = cleanvalue
+ elseif scripts[value] then
+ properties.script = cleanvalue
+ else
+ properties.script = "dflt"
+ end
+ end
+end
+
+registerotffeature {
+ name = "mode",
+ description = "mode",
+ initializers = {
+ base = setmode,
+ node = setmode,
+ }
+}
+
+registerotffeature {
+ name = "language",
+ description = "language",
+ initializers = {
+ base = setlanguage,
+ node = setlanguage,
+ }
+}
+
+registerotffeature {
+ name = "script",
+ description = "script",
+ initializers = {
+ base = setscript,
+ node = setscript,
+ }
+}
+
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otp.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otp.lua
new file mode 100644
index 00000000000..91bd05b322f
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-otp.lua
@@ -0,0 +1,909 @@
+if not modules then modules = { } end modules ['font-otp'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (packing)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: pack math (but not that much to share)
+--
+-- pitfall 5.2: hashed tables can suddenly become indexed with nil slots
+--
+-- unless we sort all hashes we can get a different pack order (no big deal but size can differ)
+
+local next, type, tostring = next, type, tostring
+local sort, concat = table.sort, table.concat
+
+local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end)
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+
+-- also used in other scripts so we need to check some tables:
+
+fonts = fonts or { }
+
+local handlers = fonts.handlers or { }
+fonts.handlers = handlers
+
+local otf = handlers.otf or { }
+handlers.otf = otf
+
+local enhancers = otf.enhancers or { }
+otf.enhancers = enhancers
+
+local glists = otf.glists or { "gsub", "gpos" }
+otf.glists = glists
+
+local criterium = 1
+local threshold = 0
+
+local function tabstr_normal(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ if type(v) == "table" then
+ s[n] = k .. ">" .. tabstr_normal(v)
+ elseif v == true then
+ s[n] = k .. "+" -- "=true"
+ elseif v then
+ s[n] = k .. "=" .. v
+ else
+ s[n] = k .. "-" -- "=false"
+ end
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+local function tabstr_flat(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ s[n] = k .. "=" .. v
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+local function tabstr_mixed(t) -- indexed
+ local s = { }
+ local n = #t
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ local k = t[1]
+ if k == true then
+ return "++" -- we need to distinguish from "true"
+ elseif k == false then
+ return "--" -- we need to distinguish from "false"
+ else
+ return tostring(k) -- number or string
+ end
+ else
+ for i=1,n do
+ local k = t[i]
+ if k == true then
+ s[i] = "++" -- we need to distinguish from "true"
+ elseif k == false then
+ s[i] = "--" -- we need to distinguish from "false"
+ else
+ s[i] = k -- number or string
+ end
+ end
+ return concat(s,",")
+ end
+end
+
+local function tabstr_boolean(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ if v then
+ s[n] = k .. "+"
+ else
+ s[n] = k .. "-"
+ end
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+-- tabstr_boolean_x = tabstr_boolean
+
+-- tabstr_boolean = function(t)
+-- local a = tabstr_normal(t)
+-- local b = tabstr_boolean_x(t)
+-- print(a)
+-- print(b)
+-- return b
+-- end
+
+-- beware: we cannot unpack and repack the same table because then sharing
+-- interferes (we could catch this if needed) .. so for now: save, reload
+-- and repack in such cases (never needed anyway) .. a tricky aspect is that
+-- we then need to sort more thanks to random hashing
+
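+-- For illustration only, the core idea of the packers defined below: a subtable is
+-- serialized to a string tag, the first occurrence is kept in a shared pool and every
+-- later occurrence is replaced by the (numeric) index of that pooled copy:
+--
+-- local pool, hash, n = { }, { }, 0
+-- local function pack_indexed_sketch(v)     -- v is a small indexed table
+--     local tag = table.concat(v," ")
+--     local i = hash[tag]
+--     if not i then
+--         n = n + 1
+--         pool[n] = v
+--         hash[tag] = n
+--         i = n
+--     end
+--     return i
+-- end
+-- -- pack_indexed_sketch { 0, 0, 10, 0 } == pack_indexed_sketch { 0, 0, 10, 0 } --> same index
+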
+local function packdata(data)
+
+ if data then
+ -- stripdata(data)
+ local h, t, c = { }, { }, { }
+ local hh, tt, cc = { }, { }, { }
+ local nt, ntt = 0, 0
+ local function pack_normal(v)
+ local tag = tabstr_normal(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag = tabstr_flat(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag = tabstr_boolean(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag = concat(v," ")
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag = tabstr_mixed(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_final(v)
+ -- v == number
+ if c[v] <= criterium then
+ return t[v]
+ else
+ -- compact hash
+ local hv = hh[v]
+ if hv then
+ return hv
+ else
+ ntt = ntt + 1
+ tt[ntt] = t[v]
+ hh[v] = ntt
+ cc[ntt] = c[v]
+ return ntt
+ end
+ end
+ end
+ local function success(stage,pass)
+ if nt == 0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt >= threshold then
+ local one, two, rest = 0, 0, 0
+ if pass == 1 then
+ for k,v in next, c do
+ if v == 1 then
+ one = one + 1
+ elseif v == 2 then
+ two = two + 1
+ else
+ rest = rest + 1
+ end
+ end
+ else
+ for k,v in next, cc do
+ if v > 20 then
+ rest = rest + 1
+ elseif v > 10 then
+ two = two + 1
+ else
+ one = one + 1
+ end
+ end
+ data.tables = tt
+ end
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
+ end
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold)
+ end
+ return false
+ end
+ end
+ local function packers(pass)
+ if pass == 1 then
+ return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
+ else
+ return pack_final, pack_final, pack_final, pack_final, pack_final
+ end
+ end
+ local resources = data.resources
+ local lookuptypes = resources.lookuptypes
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ for unicode, description in next, data.descriptions do
+ local boundingbox = description.boundingbox
+ if boundingbox then
+ description.boundingbox = pack_indexed(boundingbox)
+ end
+ local slookups = description.slookups
+ if slookups then
+ for tag, slookup in next, slookups do
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ local t = slookup[2] if t then slookup[2] = pack_indexed(t) end
+ local t = slookup[3] if t then slookup[3] = pack_indexed(t) end
+ elseif what ~= "substitution" then
+ slookups[tag] = pack_indexed(slookup) -- true is new
+ end
+ end
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ for tag, mlookup in next, mlookups do
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ for i=1,#mlookup do
+ local lookup = mlookup[i]
+ local t = lookup[2] if t then lookup[2] = pack_indexed(t) end
+ local t = lookup[3] if t then lookup[3] = pack_indexed(t) end
+ end
+ elseif what ~= "substitution" then
+ for i=1,#mlookup do
+ mlookup[i] = pack_indexed(mlookup[i]) -- true is new
+ end
+ end
+ end
+ end
+ local kerns = description.kerns
+ if kerns then
+ for tag, kern in next, kerns do
+ kerns[tag] = pack_flat(kern)
+ end
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ for tag, kern in next, kerns do
+ kerns[tag] = pack_normal(kern)
+ end
+ end
+ end
+ local anchors = description.anchors
+ if anchors then
+ for what, anchor in next, anchors do
+ if what == "baselig" then
+ for _, a in next, anchor do
+ for k=1,#a do
+ a[k] = pack_indexed(a[k])
+ end
+ end
+ else
+ for k, v in next, anchor do
+ anchor[k] = pack_indexed(v)
+ end
+ end
+ end
+ end
+ local altuni = description.altuni
+ if altuni then
+ for i=1,#altuni do
+ altuni[i] = pack_flat(altuni[i])
+ end
+ end
+ end
+ local lookups = data.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local rules = lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes
+ local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have ""
+ -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases)
+ end
+ end
+ end
+ end
+ local anchor_to_lookup = resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor, lookup in next, anchor_to_lookup do
+ anchor_to_lookup[anchor] = pack_normal(lookup)
+ end
+ end
+ local lookup_to_anchor = resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup, anchor in next, lookup_to_anchor do
+ lookup_to_anchor[lookup] = pack_normal(anchor)
+ end
+ end
+ local sequences = resources.sequences
+ if sequences then
+ for feature, sequence in next, sequences do
+ local flags = sequence.flags
+ if flags then
+ sequence.flags = pack_normal(flags)
+ end
+ local subtables = sequence.subtables
+ if subtables then
+ sequence.subtables = pack_normal(subtables)
+ end
+ local features = sequence.features
+ if features then
+ for script, feature in next, features do
+ features[script] = pack_normal(feature)
+ end
+ end
+ local order = sequence.order
+ if order then
+ sequence.order = pack_indexed(order)
+ end
+ local markclass = sequence.markclass
+ if markclass then
+ sequence.markclass = pack_boolean(markclass)
+ end
+ end
+ end
+ local lookups = resources.lookups
+ if lookups then
+ for name, lookup in next, lookups do
+ local flags = lookup.flags
+ if flags then
+ lookup.flags = pack_normal(flags)
+ end
+ local subtables = lookup.subtables
+ if subtables then
+ lookup.subtables = pack_normal(subtables)
+ end
+ end
+ end
+ local features = resources.features
+ if features then
+ for _, what in next, glists do
+ local list = features[what]
+ if list then
+ for feature, spec in next, list do
+ list[feature] = pack_normal(spec)
+ end
+ end
+ end
+ end
+ if not success(1,pass) then
+ return
+ end
+ end
+ if nt > 0 then
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ for unicode, description in next, data.descriptions do
+ local kerns = description.kerns
+ if kerns then
+ description.kerns = pack_normal(kerns)
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ math.kerns = pack_normal(kerns)
+ end
+ end
+ local anchors = description.anchors
+ if anchors then
+ description.anchors = pack_normal(anchors)
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ for tag, mlookup in next, mlookups do
+ mlookups[tag] = pack_normal(mlookup)
+ end
+ end
+ local altuni = description.altuni
+ if altuni then
+ description.altuni = pack_normal(altuni)
+ end
+ end
+ local lookups = data.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local rules = lookup.rules
+ if rules then
+ for i=1,#rules do -- was next loop
+ local rule = rules[i]
+ local r = rule.before if r then rule.before = pack_normal(r) end
+ local r = rule.after if r then rule.after = pack_normal(r) end
+ local r = rule.current if r then rule.current = pack_normal(r) end
+ end
+ end
+ end
+ end
+ local sequences = resources.sequences
+ if sequences then
+ for feature, sequence in next, sequences do
+ sequence.features = pack_normal(sequence.features)
+ end
+ end
+ if not success(2,pass) then
+ -- return
+ end
+ end
+
+ for pass=1,2 do
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ for unicode, description in next, data.descriptions do
+ local slookups = description.slookups
+ if slookups then
+ description.slookups = pack_normal(slookups)
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ description.mlookups = pack_normal(mlookups)
+ end
+ end
+ end
+
+ end
+ end
+end
+
+local unpacked_mt = {
+ __index =
+ function(t,k)
+ t[k] = false
+ return k -- next time true
+ end
+}
+
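+-- For illustration only: the metatable above turns "unpacked" into a one-shot set,
+-- so a shared (packed) table is expanded the first time it is encountered and
+-- skipped on every later encounter:
+--
+-- local unpacked = setmetatable({ }, unpacked_mt)
+-- local first  = unpacked[123]   -- 123   (not seen before, so the caller unpacks it)
+-- local second = unpacked[123]   -- false (already handled, so the caller skips it)
+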
+local function unpackdata(data)
+
+ if data then
+ local tables = data.tables
+ if tables then
+ local resources = data.resources
+ local lookuptypes = resources.lookuptypes
+ local unpacked = { }
+ setmetatable(unpacked,unpacked_mt)
+ for unicode, description in next, data.descriptions do
+ local tv = tables[description.boundingbox]
+ if tv then
+ description.boundingbox = tv
+ end
+ local slookups = description.slookups
+ if slookups then
+ local tv = tables[slookups]
+ if tv then
+ description.slookups = tv
+ slookups = unpacked[tv]
+ end
+ if slookups then
+ for tag, lookup in next, slookups do
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ local tv = tables[lookup[2]]
+ if tv then
+ lookup[2] = tv
+ end
+ local tv = tables[lookup[3]]
+ if tv then
+ lookup[3] = tv
+ end
+ elseif what ~= "substitution" then
+ local tv = tables[lookup]
+ if tv then
+ slookups[tag] = tv
+ end
+ end
+ end
+ end
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ local tv = tables[mlookups]
+ if tv then
+ description.mlookups = tv
+ mlookups = unpacked[tv]
+ end
+ if mlookups then
+ for tag, list in next, mlookups do
+ local tv = tables[list]
+ if tv then
+ mlookups[tag] = tv
+ list = unpacked[tv]
+ end
+ if list then
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ for i=1,#list do
+ local lookup = list[i]
+ local tv = tables[lookup[2]]
+ if tv then
+ lookup[2] = tv
+ end
+ local tv = tables[lookup[3]]
+ if tv then
+ lookup[3] = tv
+ end
+ end
+ elseif what ~= "substitution" then
+ for i=1,#list do
+ local tv = tables[list[i]]
+ if tv then
+ list[i] = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local kerns = description.kerns
+ if kerns then
+ local tm = tables[kerns]
+ if tm then
+ description.kerns = tm
+ kerns = unpacked[tm]
+ end
+ if kerns then
+ for k, kern in next, kerns do
+ local tv = tables[kern]
+ if tv then
+ kerns[k] = tv
+ end
+ end
+ end
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ local tm = tables[kerns]
+ if tm then
+ math.kerns = tm
+ kerns = unpacked[tm]
+ end
+ if kerns then
+ for k, kern in next, kerns do
+ local tv = tables[kern]
+ if tv then
+ kerns[k] = tv
+ end
+ end
+ end
+ end
+ end
+ local anchors = description.anchors
+ if anchors then
+ local ta = tables[anchors]
+ if ta then
+ description.anchors = ta
+ anchors = unpacked[ta]
+ end
+ if anchors then
+ for tag, anchor in next, anchors do
+ if tag == "baselig" then
+ for _, list in next, anchor do
+ for i=1,#list do
+ local tv = tables[list[i]]
+ if tv then
+ list[i] = tv
+ end
+ end
+ end
+ else
+ for a, data in next, anchor do
+ local tv = tables[data]
+ if tv then
+ anchor[a] = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local altuni = description.altuni
+ if altuni then
+ local altuni = tables[altuni]
+ if altuni then
+ description.altuni = altuni
+ for i=1,#altuni do
+ local tv = tables[altuni[i]]
+ if tv then
+ altuni[i] = tv
+ end
+ end
+ end
+ end
+ end
+ local lookups = data.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local rules = lookup.rules
+ if rules then
+ for i=1,#rules do -- was next loop
+ local rule = rules[i]
+ local before = rule.before
+ if before then
+ local tv = tables[before]
+ if tv then
+ rule.before = tv
+ before = unpacked[tv]
+ end
+ if before then
+ for i=1,#before do
+ local tv = tables[before[i]]
+ if tv then
+ before[i] = tv
+ end
+ end
+ end
+ end
+ local after = rule.after
+ if after then
+ local tv = tables[after]
+ if tv then
+ rule.after = tv
+ after = unpacked[tv]
+ end
+ if after then
+ for i=1,#after do
+ local tv = tables[after[i]]
+ if tv then
+ after[i] = tv
+ end
+ end
+ end
+ end
+ local current = rule.current
+ if current then
+ local tv = tables[current]
+ if tv then
+ rule.current = tv
+ current = unpacked[tv]
+ end
+ if current then
+ for i=1,#current do
+ local tv = tables[current[i]]
+ if tv then
+ current[i] = tv
+ end
+ end
+ end
+ end
+ local replacements = rule.replacements
+ if replacements then
+ local tv = tables[replacements]
+ if tv then
+ rule.replacements = tv
+ end
+ end
+ -- local fore = rule.fore
+ -- if fore then
+ -- local tv = tables[fore]
+ -- if tv then
+ -- rule.fore = tv
+ -- end
+ -- end
+ -- local back = rule.back
+ -- if back then
+ -- local tv = tables[back]
+ -- if tv then
+ -- rule.back = tv
+ -- end
+ -- end
+ -- local names = rule.names
+ -- if names then
+ -- local tv = tables[names]
+ -- if tv then
+ -- rule.names = tv
+ -- end
+ -- end
+ --
+ local lookups = rule.lookups
+ if lookups then
+ local tv = tables[lookups]
+ if tv then
+ rule.lookups = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup = resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor, lookup in next, anchor_to_lookup do
+ local tv = tables[lookup]
+ if tv then
+ anchor_to_lookup[anchor] = tv
+ end
+ end
+ end
+ local lookup_to_anchor = resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup, anchor in next, lookup_to_anchor do
+ local tv = tables[anchor]
+ if tv then
+ lookup_to_anchor[lookup] = tv
+ end
+ end
+ end
+ local ls = resources.sequences
+ if ls then
+ for _, feature in next, ls do
+ local flags = feature.flags
+ if flags then
+ local tv = tables[flags]
+ if tv then
+ feature.flags = tv
+ end
+ end
+ local subtables = feature.subtables
+ if subtables then
+ local tv = tables[subtables]
+ if tv then
+ feature.subtables = tv
+ end
+ end
+ local features = feature.features
+ if features then
+ local tv = tables[features]
+ if tv then
+ feature.features = tv
+ features = unpacked[tv]
+ end
+ if features then
+ for script, data in next, features do
+ local tv = tables[data]
+ if tv then
+ features[script] = tv
+ end
+ end
+ end
+ end
+ local order = feature.order
+ if order then
+ local tv = tables[order]
+ if tv then
+ feature.order = tv
+ end
+ end
+ local markclass = feature.markclass
+ if markclass then
+ local tv = tables[markclass]
+ if tv then
+ feature.markclass = tv
+ end
+ end
+ end
+ end
+ local lookups = resources.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local flags = lookup.flags
+ if flags then
+ local tv = tables[flags]
+ if tv then
+ lookup.flags = tv
+ end
+ end
+ local subtables = lookup.subtables
+ if subtables then
+ local tv = tables[subtables]
+ if tv then
+ lookup.subtables = tv
+ end
+ end
+ end
+ end
+ local features = resources.features
+ if features then
+ for _, what in next, glists do
+ local feature = features[what]
+ if feature then
+ for tag, spec in next, feature do
+ local tv = tables[spec]
+ if tv then
+ feature[tag] = tv
+ end
+ end
+ end
+ end
+ end
+ data.tables = nil
+ end
+ end
+end
+
+if otf.enhancers.register then
+
+ otf.enhancers.register( "pack", packdata)
+ otf.enhancers.register("unpack",unpackdata)
+
+-- todo: directive
+
+end
+
+otf.enhancers.unpack = unpackdata -- used elsewhere
+otf.enhancers.pack = packdata -- used elsewhere
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-tfm.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-tfm.lua
new file mode 100644
index 00000000000..2dd57684939
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-font-tfm.lua
@@ -0,0 +1,198 @@
+if not modules then modules = { } end modules ['font-tfm'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next = next
+local match = string.match
+
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local trace_features = false trackers.register("tfm.features", function(v) trace_features = v end)
+
+local report_defining = logs.reporter("fonts","defining")
+local report_tfm = logs.reporter("fonts","tfm loading")
+
+local findbinfile = resolvers.findbinfile
+
+local fonts = fonts
+local handlers = fonts.handlers
+local readers = fonts.readers
+local constructors = fonts.constructors
+local encodings = fonts.encodings
+
+local tfm = constructors.newhandler("tfm")
+tfm.version = 1.000
+tfm.maxnestingdepth = 5
+tfm.maxnestingsize = 65536*1024
+
+local tfmfeatures = constructors.newfeatures("tfm")
+local registertfmfeature = tfmfeatures.register
+
+constructors.resolvevirtualtoo = false -- will be set in font-ctx.lua
+
+fonts.formats.tfm = "type1" -- we need to have at least a value here
+
+--[[ldx--
+<p>The next function encapsulates the standard <l n='tfm'/> loader as
+supplied by <l n='luatex'/>.</p>
+--ldx]]--
+
+-- this might change: not scaling, then applying features and doing the scaling in
+-- the usual way with dummy descriptions, but on the other hand .. we no longer use
+-- tfm so why bother
+
+-- ofm directive blocks local path search unless set; btw, in context we
+-- don't support ofm files anyway as this format is obsolete
+
+-- we need to deal with nested virtual fonts, but because we load in the
+-- frontend we also need to make sure we don't nest too deep (esp when sizes
+-- get large)
+--
+-- (VTITLE Example of a recursion)
+-- (MAPFONT D 0 (FONTNAME recurse)(FONTAT D 2))
+-- (CHARACTER C A (CHARWD D 1)(CHARHT D 1)(MAP (SETRULE D 1 D 1)))
+-- (CHARACTER C B (CHARWD D 2)(CHARHT D 2)(MAP (SETCHAR C A)))
+-- (CHARACTER C C (CHARWD D 4)(CHARHT D 4)(MAP (SETCHAR C B)))
+--
+-- we added the same checks as below to the luatex engine
+
+function tfm.setfeatures(tfmdata,features)
+ local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
+ if okay then
+ return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
+ else
+ return { } -- will become false
+ end
+end
+
+local depth = { } -- table.setmetatableindex("number")
+
+local function read_from_tfm(specification)
+ local filename = specification.filename
+ local size = specification.size
+ depth[filename] = (depth[filename] or 0) + 1
+ if trace_defining then
+ report_defining("loading tfm file %a at size %s",filename,size)
+ end
+ local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough
+ if tfmdata then
+ local features = specification.features and specification.features.normal or { }
+ local resources = tfmdata.resources or { }
+ local properties = tfmdata.properties or { }
+ local parameters = tfmdata.parameters or { }
+ local shared = tfmdata.shared or { }
+ properties.name = tfmdata.name
+ properties.fontname = tfmdata.fontname
+ properties.psname = tfmdata.psname
+ properties.filename = specification.filename
+ properties.format = fonts.formats.tfm -- better than nothing
+ parameters.size = size
+ --
+ tfmdata.properties = properties
+ tfmdata.resources = resources
+ tfmdata.parameters = parameters
+ tfmdata.shared = shared
+ --
+ shared.rawdata = { }
+ shared.features = features
+ shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil
+ parameters.slant = parameters.slant or parameters[1] or 0
+ parameters.space = parameters.space or parameters[2] or 0
+ parameters.space_stretch = parameters.space_stretch or parameters[3] or 0
+ parameters.space_shrink = parameters.space_shrink or parameters[4] or 0
+ parameters.x_height = parameters.x_height or parameters[5] or 0
+ parameters.quad = parameters.quad or parameters[6] or 0
+ parameters.extra_space = parameters.extra_space or parameters[7] or 0
+ --
+ constructors.enhanceparameters(parameters) -- official copies for us
+ --
+ if constructors.resolvevirtualtoo then
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here
+ local vfname = findbinfile(specification.name, 'ovf')
+ if vfname and vfname ~= "" then
+ local vfdata = font.read_vf(vfname,size) -- not cached, fast enough
+ if vfdata then
+ local chars = tfmdata.characters
+ for k,v in next, vfdata.characters do
+ chars[k].commands = v.commands
+ end
+ properties.virtualized = true
+ tfmdata.fonts = vfdata.fonts
+                    tfmdata.type = "virtual" -- else nested calls with cumulative scaling
+ local fontlist = vfdata.fonts
+ local name = file.nameonly(filename)
+ for i=1,#fontlist do
+ local n = fontlist[i].name
+ local s = fontlist[i].size
+ local d = depth[filename]
+ s = constructors.scaled(s,vfdata.designsize)
+ if d > tfm.maxnestingdepth then
+ report_defining("too deeply nested virtual font %a with size %a, max nesting depth %s",n,s,tfm.maxnestingdepth)
+ fontlist[i] = { id = 0 }
+ elseif (d > 1) and (s > tfm.maxnestingsize) then
+ report_defining("virtual font %a exceeds size %s",n,s)
+ fontlist[i] = { id = 0 }
+ else
+ local t, id = fonts.constructors.readanddefine(n,s)
+ fontlist[i] = { id = id }
+ end
+ end
+ end
+ end
+ end
+ --
+ local allfeatures = tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
+ if not features.encoding then
+ local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.*
+ if filename and encoding and encodings.known and encodings.known[encoding] then
+ features.encoding = encoding
+ end
+ end
+ -- let's play safe:
+ properties.haskerns = true
+        properties.hasligatures          = true
+ resources.unicodes = { }
+ resources.lookuptags = { }
+ --
+ depth[filename] = depth[filename] - 1
+ return tfmdata
+ else
+ depth[filename] = depth[filename] - 1
+ end
+end
+
+local function check_tfm(specification,fullname) -- we could split up like afm/otf
+ local foundname = findbinfile(fullname, 'tfm') or ""
+ if foundname == "" then
+ foundname = findbinfile(fullname, 'ofm') or "" -- not needed in context
+ end
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"tfm") or ""
+ end
+ if foundname ~= "" then
+ specification.filename = foundname
+ specification.format = "ofm"
+ return read_from_tfm(specification)
+ elseif trace_defining then
+ report_defining("loading tfm with name %a fails",specification.name)
+ end
+end
+
+readers.check_tfm = check_tfm
+
+function readers.tfm(specification)
+ local fullname = specification.filename or ""
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ fullname = specification.name .. "." .. forced
+ else
+ fullname = specification.name
+ end
+ end
+ return check_tfm(specification,fullname)
+end
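
The three functions above form a small chain: readers.tfm normalises the requested name, check_tfm resolves it to an actual tfm/ofm file, and read_from_tfm loads it and fills in the properties, parameters and shared subtables the rest of the loader expects. A minimal sketch of driving that chain directly, assuming the generic loader has been initialised and that readers is the usual fonts.readers table (font name and size below are illustrative, not taken from this patch):

    local spec = {
        name = "cmr10",      -- any metric file on the search path
        size = 10 * 65536,   -- sizes are passed in scaled points
    }
    local tfmdata = fonts.readers.tfm(spec)   -- goes through check_tfm / read_from_tfm
    if tfmdata then
        print(tfmdata.properties.fontname, tfmdata.parameters.size)
    end
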
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-cbk.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-cbk.lua
new file mode 100644
index 00000000000..9da8151de2b
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-cbk.lua
@@ -0,0 +1,220 @@
+if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+local nodes = nodes
+
+-- Fonts: (might move to node-gef.lua)
+
+local traverse_id = node.traverse_id
+local free_node = node.free
+local remove_node = node.remove
+
+local glyph_code = nodes.nodecodes.glyph
+local disc_code = nodes.nodecodes.disc
+
+-- from now on we apply ligaturing and kerning here because it might interfere with complex
+-- opentype discretionary handling where the base ligature pass expects some weird extra
+-- pointers (which then confuse the tail slider that has some checking built in)
+
+local ligaturing = node.ligaturing
+local kerning = node.kerning
+
+local basepass = true
+
+local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning = nil end
+local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning = nil end
+
+function node.ligaturing(...)
+ if basepass and l_warning then
+ l_warning()
+ end
+ return ligaturing(...)
+end
+
+function node.kerning(...)
+ if basepass and k_warning then
+ k_warning()
+ end
+ return kerning(...)
+end
+
+function nodes.handlers.setbasepass(v)
+ basepass = v
+end
+
+function nodes.handlers.nodepass(head)
+ local fontdata = fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts = { }
+ local basefonts = { }
+ local prevfont = nil
+ local basefont = nil
+ local variants = nil
+ local redundant = nil
+ for n in traverse_id(glyph_code,head) do
+ local font = n.font
+ if font ~= prevfont then
+ if basefont then
+ basefont[2] = n.prev
+ end
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font] --
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ elseif basepass then
+ basefont = { n, nil }
+ basefonts[#basefonts+1] = basefont
+ end
+ end
+ local resources = tfmdata.resources
+ variants = resources and resources.variants
+ variants = variants and next(variants) and variants or false
+ end
+ else
+ local tfmdata = fontdata[prevfont]
+ if tfmdata then
+ local resources = tfmdata.resources
+ variants = resources and resources.variants
+ variants = variants and next(variants) and variants or false
+ end
+ end
+ end
+ if variants then
+ local char = n.char
+ if char >= 0xFE00 and (char <= 0xFE0F or (char >= 0xE0100 and char <= 0xE01EF)) then
+ local hash = variants[char]
+ if hash then
+ local p = n.prev
+ if p and p.id == glyph_code then
+ local variant = hash[p.char]
+ if variant then
+ p.char = variant
+ if not redundant then
+ redundant = { n }
+ else
+ redundant[#redundant+1] = n
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if redundant then
+ for i=1,#redundant do
+ local n = redundant[i]
+ remove_node(head,n)
+ free_node(n)
+ end
+ end
+ for d in traverse_id(disc_code,head) do
+ local r = d.replace
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font] --
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if next(usedfonts) then
+ for font, processors in next, usedfonts do
+ for i=1,#processors do
+ head = processors[i](head,font,0) or head
+ end
+ end
+ end
+ if basepass and #basefonts > 0 then
+ for i=1,#basefonts do
+ local range = basefonts[i]
+ local start = range[1]
+ local stop = range[2]
+ -- maybe even: if start and start ~= stop then
+ if start or stop then
+ local prev = nil
+ local next = nil
+ local front = start == head
+ if stop then
+ next = stop.next
+ start, stop = ligaturing(start,stop)
+ start, stop = kerning(start,stop)
+ elseif start then
+ prev = start.prev
+ start = ligaturing(start)
+ start = kerning(start)
+ end
+ if prev then
+ start.prev = prev
+ prev.next = start
+ end
+ if next then
+ stop.next = next
+ next.prev = stop
+ end
+ if front then
+ head = start
+ end
+ end
+ end
+ end
+ return head, true
+ else
+ return head, false
+ end
+end
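
The loop above also resolves Unicode variation selectors: when a glyph is followed by a selector in the U+FE00..U+FE0F or U+E0100..U+E01EF range and the current font's resources.variants table has a match, the previous glyph's char is replaced by the variant and the selector node is removed. A schematic of that lookup, with an invented variants table standing in for what a real font would provide:

    local variants = {
        [0xFE00] = { [0x2205] = 0xF0105 },   -- selector -> { base char -> variant glyph }, values invented
    }
    local selector, base = 0xFE00, 0x2205
    local variant = variants[selector] and variants[selector][base]
    -- nodepass would set the base glyph's char to this variant and free the selector node
    print(variant)
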
+
+function nodes.handlers.basepass(head)
+ if basepass then
+ head = ligaturing(head)
+ head = kerning(head)
+ end
+ return head, true
+end
+
+local nodepass = nodes.handlers.nodepass
+local basepass = nodes.handlers.basepass
+local injectpass = nodes.injections.handler
+local protectpass = nodes.handlers.protectglyphs
+
+function nodes.simple_font_handler(head)
+ if head then
+ head = nodepass(head)
+ head = injectpass(head)
+ head = basepass(head)
+ protectpass(head)
+ return head, true
+ else
+ return head, false
+ end
+end
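
nodes.simple_font_handler is the composite entry point: it runs the per-font OpenType processors, the injection pass, the base ligaturing/kerning pass, and finally the glyph protection pass. In a bare LuaTeX setup it would be hooked into the node list filters roughly as below; luaotfload wires this up through its own callback management, so this is only an illustrative sketch:

    callback.register("ligaturing", false)   -- the base pass above handles these ranges
    callback.register("kerning",    false)
    callback.register("pre_linebreak_filter", nodes.simple_font_handler)
    callback.register("hpack_filter",         nodes.simple_font_handler)
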
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-def.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-def.lua
index 0c2f0dbd583..0c2f0dbd583 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-def.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-def.lua
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-demo-vf-1.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-demo-vf-1.lua
new file mode 100644
index 00000000000..13acd16ca90
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-demo-vf-1.lua
@@ -0,0 +1,44 @@
+local identifiers = fonts.hashes.identifiers
+
+return function(specification)
+ local f1, id1 = fonts.constructors.readanddefine('lmroman10-regular', specification.size)
+ local f2, id2 = fonts.constructors.readanddefine('lmsans10-regular', specification.size)
+ local f3, id3 = fonts.constructors.readanddefine('lmtypewriter10-regular',specification.size)
+ if f1 and f2 and f3 then
+ f1.properties.name = specification.name
+ f1.properties.virtualized = true
+ f1.fonts = {
+ { id = id1 },
+ { id = id2 },
+ { id = id3 },
+ }
+ local color = { [0] =
+ { "special", "pdf:0 g" },
+ { "special", "pdf:1 0 0 rg" },
+ { "special", "pdf:0 1 0 rg" },
+ { "special", "pdf:0 0 1 rg" },
+ { "special", "pdf:0 0 1 rg" },
+ }
+ local chars = {
+ identifiers[id1].characters,
+ identifiers[id2].characters,
+ identifiers[id3].characters,
+ }
+ for u, v in next, f1.characters do
+ local n = math.floor(math.random(1,3)+0.5)
+ local c = chars[n][u] or v
+ v.commands = {
+ color[n],
+ { 'slot', n, u },
+ color[0],
+ { 'nop' }
+ }
+ v.kerns = nil
+ v.width = c.width
+ v.height = c.height
+ v.depth = c.depth
+ v.italic = nil
+ end
+ end
+ return f1
+end
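
This file is a loader for a demo virtual font: it defines three Latin Modern member fonts and rewrites every character of the first one into a commands list that switches colour, typesets the slot from a randomly picked member font, and restores the colour. A character entry equivalent to what the loop builds might look like this (the metrics are invented; a real entry copies them from the chosen member font):

    local example_character = {
        width    = 491520,
        height   = 452984,
        depth    = 0,
        commands = {
            { "special", "pdf:0 1 0 rg" },   -- colour for member font 2
            { "slot", 2, 0x41 },             -- typeset "A" from member font 2
            { "special", "pdf:0 g" },        -- back to black
            { "nop" },
        },
    }
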
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-enc.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-enc.lua
index e20c3a03b54..2e1c6a46674 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-enc.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-enc.lua
@@ -11,9 +11,10 @@ if context then
os.exit()
end
-local fonts = fonts
-fonts.encodings = { }
-fonts.encodings.agl = { }
+local fonts = fonts
+fonts.encodings = { }
+fonts.encodings.agl = { }
+fonts.encodings.known = { }
setmetatable(fonts.encodings.agl, { __index = function(t,k)
if k == "unicodes" then
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-ext.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ext.lua
index b60d0451228..b60d0451228 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-ext.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ext.lua
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-inj.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-inj.lua
new file mode 100644
index 00000000000..36781f72f34
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-inj.lua
@@ -0,0 +1,1152 @@
+if not modules then modules = { } end modules ['font-inj'] = {
+ version = 1.001,
+ comment = "companion to font-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This property based variant is not faster but looks nicer than the attribute one. We
+-- need to use rawget (which is about 4 times slower than a direct access) but we cannot
+-- get/set that one for our purpose! This version does a bit more with discretionaries
+-- (and Kai has tested it with his collection of weird fonts.)
+
+-- There is some duplicate code here (especially in the pre/post/replace branches) but
+-- we go for speed. We could store a list of glyph and mark nodes when registering but it's
+-- cleaner to have an identification pass here. Also, I need to keep tracing in mind so
+-- being too clever here is dangerous.
+
+-- The subtype test is not needed as there will be no (new) properties set, given that we
+-- reset the properties.
+
+if not nodes.properties then return end
+
+local next, rawget = next, rawget
+local utfchar = utf.char
+local fastcopy = table.fastcopy
+
+local trace_injections = false trackers.register("fonts.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("fonts","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local newkern = nodepool.kern
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local find_tail = nuts.tail
+
+local properties = nodes.properties.data
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local nofregisteredkerns = 0
+local nofregisteredpairs = 0
+local nofregisteredmarks = 0
+local nofregisteredcursives = 0
+----- markanchors = { } -- one base can have more marks
+local keepregisteredcounts = false
+
+function injections.keepcounts()
+ keepregisteredcounts = true
+end
+
+function injections.resetcounts()
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ keepregisteredcounts = false
+end
+
+-- We need to make sure that a possible metatable will not kick in unexpectedly.
+
+function injections.reset(n)
+ local p = rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections = nil
+ end
+end
+
+function injections.copy(target,source)
+ local sp = rawget(properties,source)
+ if sp then
+ local tp = rawget(properties,target)
+ local si = rawget(sp,"injections")
+ if si then
+ si = fastcopy(si)
+ if tp then
+ tp.injections = si
+ else
+ properties[target] = {
+ injections = si,
+ }
+ end
+ else
+ if tp then
+ tp.injections = nil
+ end
+ end
+ end
+end
+
+function injections.setligaindex(n,index)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.ligaindex = index
+ else
+ p.injections = {
+ ligaindex = index
+ }
+ end
+ else
+ properties[n] = {
+ injections = {
+ ligaindex = index
+ }
+ }
+ end
+end
+
+function injections.getligaindex(n,default)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
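
All of these accessors keep their data in nodes.properties.data, keyed by the node, under an injections subtable (or a pre/post/replace specific variant inside discretionaries); rawget is used throughout so the property store's metatable never kicks in. The resulting per-glyph layout is just a nested table, for example (values invented):

    -- what properties[n] could look like once a kern and a ligature index are registered
    local example_property = {
        injections = {
            leftkern  = 65536,   -- recorded by setkern/setpair, flushed by the handler
            ligaindex = 2,       -- recorded by setligaindex while building ligatures
        },
    }
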
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) -- hm: nuts or nodes
+ local dx = factor*(exit[1]-entry[1])
+ local dy = -factor*(exit[2]-entry[2])
+ local ws = tfmstart.width
+ local wn = tfmnext.width
+ nofregisteredcursives = nofregisteredcursives + 1
+ if rlmode < 0 then
+ dx = -(dx + wn)
+ else
+ dx = dx - ws
+ end
+ --
+ local p = rawget(properties,start)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursiveanchor = true
+ else
+ p.injections = {
+ cursiveanchor = true,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ cursiveanchor = true,
+ },
+ }
+ end
+ local p = rawget(properties,nxt)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursivex = dx
+ i.cursivey = dy
+ else
+ p.injections = {
+ cursivex = dx,
+ cursivey = dy,
+ }
+ end
+ else
+ properties[nxt] = {
+ injections = {
+ cursivex = dx,
+ cursivey = dy,
+ },
+ }
+ end
+ return dx, dy, nofregisteredcursives
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2lflag & tfmchr not used
+ local x = factor*spec[1]
+ local y = factor*spec[2]
+ local w = factor*spec[3]
+ local h = factor*spec[4]
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then -- okay?
+ local yoffset = y - h
+ local leftkern = x -- both kerns are set in a pair kern compared
+ local rightkern = w - x -- to normal kerns where we set only leftkern
+ if leftkern ~= 0 or rightkern ~= 0 or yoffset ~= 0 then
+ nofregisteredpairs = nofregisteredpairs + 1
+ if rlmode and rlmode < 0 then
+ leftkern, rightkern = rightkern, leftkern
+ end
+ if not injection then
+ injection = "injections"
+ end
+ local p = rawget(properties,current)
+ if p then
+ local i = rawget(p,injection)
+ if i then
+ if leftkern ~= 0 then
+ i.leftkern = (i.leftkern or 0) + leftkern
+ end
+ if rightkern ~= 0 then
+ i.rightkern = (i.rightkern or 0) + rightkern
+ end
+ if yoffset ~= 0 then
+ i.yoffset = (i.yoffset or 0) + yoffset
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ p[injection] = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ }
+ else
+ p[injection] = {
+ yoffset = yoffset,
+ }
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ properties[current] = {
+ [injection] = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ },
+ }
+ else
+ properties[current] = {
+ [injection] = {
+ yoffset = yoffset,
+ },
+ }
+ end
+ return x, y, w, h, nofregisteredpairs
+ end
+ end
+ return x, y, w, h -- no bound
+end
+
+-- This needs checking for rl < 0 but it is unlikely that an r2l script uses kernclasses between
+-- glyphs so we're probably safe (KE has a problematic font where marks interfere with rl < 0 in
+-- the previous case)
+
+function injections.setkern(current,factor,rlmode,x,injection)
+ local dx = factor * x
+ if dx ~= 0 then
+ nofregisteredkerns = nofregisteredkerns + 1
+ local p = rawget(properties,current)
+ if not injection then
+ injection = "injections"
+ end
+ if p then
+ local i = rawget(p,injection)
+ if i then
+ i.leftkern = dx + (i.leftkern or 0)
+ else
+ p[injection] = {
+ leftkern = dx,
+ }
+ end
+ else
+ properties[current] = {
+ [injection] = {
+ leftkern = dx,
+ },
+ }
+ end
+ return dx, nofregisteredkerns
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase,mkmk) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ nofregisteredmarks = nofregisteredmarks + 1
+ -- markanchors[nofregisteredmarks] = base
+ if rlmode >= 0 then
+ dx = tfmbase.width - dx -- see later commented ox
+ end
+ local p = rawget(properties,start)
+ -- hm, dejavu serif does a sloppy mark2mark before mark2base
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ if i.markmark then
+ -- out of order mkmk: yes or no or option
+ else
+ i.markx = dx
+ i.marky = dy
+ i.markdir = rlmode or 0
+ i.markbase = nofregisteredmarks
+ i.markbasenode = base
+ i.markmark = mkmk
+ end
+ else
+ p.injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ markmark = mkmk,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ markmark = mkmk,
+ },
+ }
+ end
+ return dx, dy, nofregisteredmarks
+end
+
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function showchar(n,nested)
+ local char = getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+
+local function show(n,what,nested,symbol)
+ if n then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,what)
+ if i then
+ local leftkern = i.leftkern or 0
+ local rightkern = i.rightkern or 0
+ local yoffset = i.yoffset or 0
+ local markx = i.markx or 0
+ local marky = i.marky or 0
+ local markdir = i.markdir or 0
+ local markbase = i.markbase or 0 -- will be markbasenode
+ local cursivex = i.cursivex or 0
+ local cursivey = i.cursivey or 0
+ local ligaindex = i.ligaindex or 0
+ local margin = nested and 4 or 2
+ --
+ if rightkern ~= 0 or yoffset ~= 0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern ~= 0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx ~= 0 or marky ~= 0 or markbase ~= 0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase ~= 0 and "yes" or "no")
+ end
+ if cursivex ~= 0 or cursivey ~= 0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex ~= 0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n = head
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id == disc_code then
+ local pre = getfield(n,"pre")
+ local post = getfield(n,"post")
+ local replace = getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
+ end
+ if post then
+ showsub(post,"postinjections","post")
+ end
+ if replace then
+ showsub(replace,"replaceinjections","replace")
+ end
+ end
+ n = getnext(n)
+ end
+ report_injections("end run")
+end
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",getfield(current,"kern"))
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = getnext(current)
+ end
+end
+
+local function collect_glyphs(head,offsets)
+ local glyphs, glyphi, nofglyphs = { }, { }, 0
+ local marks, marki, nofmarks = { }, { }, 0
+ local nf, tm = nil, nil
+ local n = head
+
+ local function identify(n,what)
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ -- other hash in ctx:
+ tm = fontdata[nf].resources
+ if tm then
+ tm = tm.marks
+ end
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = n
+ marki[nofmarks] = "injections"
+ else
+ nofglyphs = nofglyphs + 1
+ glyphs[nofglyphs] = n
+ glyphi[nofglyphs] = what
+ end
+ if offsets then
+ -- yoffsets can influence curs steps
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,what)
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+
+ while n do -- only needed for relevant fonts
+ local id = getid(n)
+ if id == glyph_code then
+ identify(n,"injections")
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ identify(n,"preinjections")
+ end
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ identify(n,"postinjections")
+ end
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ identify(n,"replaceinjections")
+ end
+ end
+ end
+ end
+ n = getnext(n)
+ end
+
+ return glyphs, glyphi, nofglyphs, marks, marki, nofmarks
+end
+
+local function inject_marks(marks,marki,nofmarks)
+ for i=1,nofmarks do
+ local n = marks[i]
+ local pn = rawget(properties,n)
+ if pn then
+ local ni = marki[i]
+ local pn = rawget(pn,ni)
+ if pn then
+ local p = pn.markbasenode
+ if p then
+ local px = getfield(p,"xoffset")
+ local ox = 0
+ local rightkern = nil
+ local pp = rawget(properties,p)
+ if pp then
+ pp = rawget(pp,ni)
+ if pp then
+ rightkern = pp.rightkern
+ end
+ end
+ if rightkern then -- x and w ~= 0
+ if pn.markdir < 0 then
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ ox = px - pn.markx - rightkern
+ -- report_injections("r2l case 1: %p",ox)
+ else
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ -- ox = px - getfield(p,"width") + pn.markx - pp.leftkern
+ --
+ -- According to Kai we don't need to handle leftkern here but I'm
+ -- pretty sure I've run into a case where it was needed so maybe
+ -- some day we need something more clever here.
+ --
+ if false then
+ -- a mark with kerning
+ local leftkern = pp.leftkern
+ if leftkern then
+ ox = px - pn.markx - leftkern
+ else
+ ox = px - pn.markx
+ end
+ else
+ ox = px - pn.markx
+ end
+ end
+ else
+ -- we need to deal with fonts that have marks with width
+ -- if pn.markdir < 0 then
+ -- ox = px - pn.markx
+ -- -- report_injections("r2l case 3: %p",ox)
+ -- else
+ -- -- ox = px - getfield(p,"width") + pn.markx
+ ox = px - pn.markx
+ -- report_injections("l2r case 3: %p",ox)
+ -- end
+ local wn = getfield(n,"width") -- in arial marks have widths
+ if wn ~= 0 then
+ -- bad: we should center
+ -- insert_node_before(head,n,newkern(-wn/2))
+ -- insert_node_after(head,n,newkern(-wn/2))
+ pn.leftkern = -wn/2
+ pn.rightkern = -wn/2
+ -- wx[n] = { 0, -wn/2, 0, -wn }
+ end
+ -- so far
+ end
+ setfield(n,"xoffset",ox)
+ --
+ local py = getfield(p,"yoffset")
+-- local oy = 0
+-- if marks[p] then
+-- oy = py + pn.marky
+-- else
+-- oy = getfield(n,"yoffset") + py + pn.marky
+-- end
+ local oy = getfield(n,"yoffset") + py + pn.marky
+ setfield(n,"yoffset",oy)
+ else
+ -- normally this can't happen (only when in trace mode which is a special case anyway)
+ -- report_injections("missing mark anchor %i",pn.markbase or 0)
+ end
+ end
+ end
+ end
+end
+
+local function inject_cursives(glyphs,glyphi,nofglyphs)
+ local cursiveanchor, lastanchor = nil, nil
+ local minc, maxc, last = 0, 0, nil
+ for i=1,nofglyphs do
+ local n = glyphs[i]
+ local pn = rawget(properties,n)
+ if pn then
+ pn = rawget(pn,glyphi[i])
+ end
+ if pn then
+ local cursivex = pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex ~= 0 then
+ pn.leftkern = (pn.leftkern or 0) + cursivex
+ end
+ if lastanchor then
+ if maxc == 0 then
+ minc = lastanchor
+ end
+ maxc = lastanchor
+ properties[cursiveanchor].cursivedy = pn.cursivey
+ end
+ last = n
+ else
+ maxc = 0
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor = n
+ lastanchor = i
+ else
+ cursiveanchor = nil
+ lastanchor = nil
+ if maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset") + ny) -- ?
+ end
+ maxc = 0
+ cursiveanchor = nil
+ lastanchor = nil
+ end
+ -- if maxc > 0 and not cursiveanchor then
+ -- local ny = getfield(n,"yoffset")
+ -- for i=maxc,minc,-1 do
+ -- local ti = glyphs[i][1]
+ -- ny = ny + properties[ti].cursivedy
+ -- setfield(ti,"yoffset",ny) -- why not add ?
+ -- end
+ -- maxc = 0
+ -- end
+ end
+ if last and maxc > 0 then
+ local ny = getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ end
+end
+
+-- G +D-pre G
+-- D-post+
+-- +D-replace+
+--
+-- G +D-pre +D-pre
+-- D-post +D-post
+-- +D-replace +D-replace
+
+local function inject_kerns(head,glist,ilist,length) -- not complete ! compare with inject_kerns_only (but unlikely disc here)
+ for i=1,length do
+ local n = glist[i]
+ local pn = rawget(properties,n)
+ if pn then
+ local dp = nil
+ local dr = nil
+ local ni = ilist[i]
+ local p = nil
+ if ni == "injections" then
+ p = getprev(n)
+ if p then
+ local id = getid(p)
+ if id == disc_code then
+ dp = getfield(p,"post")
+ dr = getfield(p,"replace")
+ end
+ end
+ end
+ if dp then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(dp)
+ insert_node_after(dp,t,newkern(leftkern))
+ setfield(p,"post",dp) -- currently we need to force a tail refresh
+ end
+ end
+ end
+ if dr then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(dr)
+ insert_node_after(dr,t,newkern(leftkern))
+ setfield(p,"replace",dr) -- currently we need to force a tail refresh
+ end
+ end
+ else
+ local i = rawget(pn,ni)
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ insert_node_before(head,n,newkern(leftkern)) -- type 0/2
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern)) -- type 0/2
+ end
+ end
+ end
+ end
+ end
+end
+
+local function inject_everything(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs, glyphi, nofglyphs, marks, marki, nofmarks = collect_glyphs(head,nofregisteredpairs > 0)
+ if nofglyphs > 0 then
+ if nofregisteredcursives > 0 then
+ inject_cursives(glyphs,glyphi,nofglyphs)
+ end
+ if nofregisteredmarks > 0 then -- and nofmarks > 0
+ inject_marks(marks,marki,nofmarks)
+ end
+ inject_kerns(head,glyphs,glyphi,nofglyphs)
+ end
+ if nofmarks > 0 then
+ inject_kerns(head,marks,marki,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ end
+ return tonode(head), true
+end
+
+-- G +D-pre G
+-- D-post+
+-- +D-replace+
+--
+-- G +D-pre +D-pre
+-- D-post +D-post
+-- +D-replace +D-replace
+
+local function inject_kerns_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n = head
+ local p = nil -- disc node when non-nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"post",d) -- currently we need to force a tail refresh
+ end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"replace",d) -- currently we need to force a tail refresh
+ end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ -- this is the most common case
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ head = insert_node_before(head,n,newkern(leftkern))
+ end
+ end
+ end
+ end
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"preinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
+local function inject_pairs_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ local n = head
+ local p = nil -- disc node when non-nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"post",d) -- currently we need to force a tail refresh
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"replace",d) -- currently we need to force a tail refresh
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ else
+ -- this is the most common case
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ head = insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"preinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredpairs = 0
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
+function injections.handler(head,where)
+ if nofregisteredmarks > 0 or nofregisteredcursives > 0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs > 0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns > 0 then
+ return inject_kerns_only(head,where)
+ else
+ return head, false
+ end
+end
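
The handler dispatches on what the preceding positioning pass registered: marks or cursives force the full inject_everything walk, plain pairs and single kerns get the cheaper dedicated walkers, and nothing registered means the list goes back untouched. The bookkeeping convention set up in injections.setpair is the key to reading those walkers: for a value record {x, y, w, h} scaled by factor, a glyph gets a kern of x before it, a kern of w - x after it, and a vertical offset of y - h. A worked example with made-up numbers:

    local factor = 655.36                   -- design units to scaled points, illustrative
    local spec   = { 100, 0, 150, 0 }       -- x, y, w, h from a pair positioning rule
    local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
    local leftkern  = x                     -- inserted before the glyph
    local rightkern = w - x                 -- inserted after the glyph
    local yoffset   = y - h                 -- applied to the glyph node itself
    print(leftkern, rightkern, yoffset)     -- roughly 65536, 32768 and 0
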
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-lua.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-lua.lua
index ec3fe38be3e..ec3fe38be3e 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-lua.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-lua.lua
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ota.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ota.lua
new file mode 100644
index 00000000000..f083fe09e83
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ota.lua
@@ -0,0 +1,459 @@
+if not modules then modules = { } end modules ['font-otx'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (analysing)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type = type
+
+if not trackers then trackers = { register = function() end } end
+
+----- trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+
+local fonts, nodes, node = fonts, nodes, node
+
+local allocate = utilities.storage.allocate
+
+local otf = fonts.handlers.otf
+
+local analyzers = fonts.analyzers
+local initializers = allocate()
+local methods = allocate()
+
+analyzers.initializers = initializers
+analyzers.methods = methods
+analyzers.useunicodemarks = false
+
+local a_state = attributes.private('state')
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local traverse_node_list = nuts.traverse
+local end_of_math = nuts.end_of_math
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local math_code = nodecodes.math
+
+local fontdata = fonts.hashes.identifiers
+local categories = characters and characters.categories or { } -- sorry, only in context
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+--[[ldx--
+<p>Analyzers run per script and/or language and are needed in order to
+process features right.</p>
+--ldx]]--
+
+-- never use these numbers directly
+
+local s_init = 1 local s_rphf = 7
+local s_medi = 2 local s_half = 8
+local s_fina = 3 local s_pref = 9
+local s_isol = 4 local s_blwf = 10
+local s_mark = 5 local s_pstf = 11
+local s_rest = 6
+
+local states = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ mark = s_mark,
+ rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+local features = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ -- mark = s_mark,
+ -- rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+analyzers.states = states
+analyzers.features = features
+
+-- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
+-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
+
+function analyzers.setstate(head,font)
+ local useunicodemarks = analyzers.useunicodemarks
+ local tfmdata = fontdata[font]
+ local descriptions = tfmdata.descriptions
+ local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
+ current = tonut(current)
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font then
+ done = true
+ local char = getchar(current)
+ local d = descriptions[char]
+ if d then
+ if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
+ done = true
+ setprop(current,a_state,s_mark)
+ elseif n == 0 then
+ first, last, n = current, current, 1
+ setprop(current,a_state,s_init)
+ else
+ last, n = current, n+1
+ setprop(current,a_state,s_medi)
+ end
+ else -- finish
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first, last, n = nil, nil, 0
+ end
+ elseif id == disc_code then
+ -- always in the middle
+ setprop(current,a_state,s_medi)
+ last = current
+ else -- finish
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first, last, n = nil, nil, 0
+ if id == math_code then
+ current = end_of_math(current)
+ end
+ end
+ current = getnext(current)
+ end
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ return head, done
+end
+
+-- in the future we will use language/script attributes instead of the
+-- font related value, but then we also need dynamic features which is
+-- somewhat slower; and .. we need a chain of them
+
+local function analyzeinitializer(tfmdata,value) -- attr
+ local script, language = otf.scriptandlanguage(tfmdata) -- attr
+ local action = initializers[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(tfmdata,value)
+ else
+ local action = action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+
+local function analyzeprocessor(head,font,attr)
+ local tfmdata = fontdata[font]
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
+ local action = methods[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(head,font,attr)
+ else
+ action = action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head, false
+end
+
+registerotffeature {
+ name = "analyze",
+ description = "analysis of character classes",
+ default = true,
+ initializers = {
+ node = analyzeinitializer,
+ },
+ processors = {
+ position = 1,
+ node = analyzeprocessor,
+ }
+}
+
+-- latin
+
+methods.latn = analyzers.setstate
+
+-- This info can eventually go into char-def and we will then have a state
+-- table for generic use (unicode recognizes all states but in practice
+-- has only
+--
+-- isolated : isol
+-- final : isol_fina
+-- medial : isol_fina_medi_init
+--
+-- so in practice, without the analyzer it's rather useless info, which is
+-- why having it in char-def only makes sense for special purposes such as
+-- tracing and visualizing.
+
+local tatweel = 0x0640
+local zwnj = 0x200C
+local zwj = 0x200D
+
+local isolated = { -- isol
+ [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
+ [0x0604] = true,
+ [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
+ [0x06DD] = true,
+ -- mandaic
+ [0x0856] = true, [0x0858] = true, [0x0857] = true,
+ -- n'ko
+ [0x07FA] = true,
+ -- also here:
+ [zwnj] = true,
+ -- 7
+ [0x08AD] = true,
+}
+
+local final = { -- isol_fina
+ [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
+ [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
+ [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
+ [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
+ [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
+ [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
+ [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
+ [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
+ [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
+ [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
+ [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
+ [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
+ [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
+ [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
+ [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
+ [0x0778] = true, [0x0779] = true,
+ [0x08AA] = true, [0x08AB] = true, [0x08AC] = true,
+ [0xFEF5] = true, [0xFEF7] = true, [0xFEF9] = true, [0xFEFB] = true,
+ -- syriac
+ [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
+ [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
+ [0x072C] = true, [0x071E] = true,
+ [0x072F] = true, [0x074D] = true,
+ -- mandaic
+ [0x0840] = true, [0x0849] = true, [0x0854] = true, [0x0846] = true,
+ [0x084F] = true,
+ -- 7
+ [0x08AE] = true, [0x08B1] = true, [0x08B2] = true,
+}
+
+local medial = { -- isol_fina_medi_init
+ [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
+ [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
+ [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
+ [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
+ [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
+ [0x0641] = true, [0x0642] = true, [0x0643] = true,
+ [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
+ [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
+ [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
+ [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
+ [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
+ [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
+ [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
+ [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
+ [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
+ [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
+ [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
+ [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
+ [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
+ [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
+ [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
+ [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
+ [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
+ [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
+ [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
+ [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
+ [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
+ [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
+ [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
+ [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
+ [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
+ [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
+ [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
+ [0x077E] = true, [0x077F] = true,
+ [0x08A0] = true, [0x08A2] = true, [0x08A4] = true, [0x08A5] = true,
+ [0x08A6] = true, [0x0620] = true, [0x08A8] = true, [0x08A9] = true,
+ [0x08A7] = true, [0x08A3] = true,
+ -- syriac
+ [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
+ [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
+ [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
+ [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
+ [0x0729] = true, [0x072B] = true, [0x072D] = true, [0x072E] = true,
+ [0x074E] = true, [0x074F] = true,
+ -- mandaic
+ [0x0841] = true, [0x0842] = true, [0x0843] = true, [0x0844] = true,
+ [0x0845] = true, [0x0847] = true, [0x0848] = true, [0x0855] = true,
+ [0x0851] = true, [0x084E] = true, [0x084D] = true, [0x084A] = true,
+ [0x084B] = true, [0x084C] = true, [0x0850] = true, [0x0852] = true,
+ [0x0853] = true,
+ -- n'ko
+ [0x07D7] = true, [0x07E8] = true, [0x07D9] = true, [0x07EA] = true,
+ [0x07CA] = true, [0x07DB] = true, [0x07CC] = true, [0x07DD] = true,
+ [0x07CE] = true, [0x07DF] = true, [0x07D4] = true, [0x07E5] = true,
+ [0x07E9] = true, [0x07E7] = true, [0x07E3] = true, [0x07E2] = true,
+ [0x07E0] = true, [0x07E1] = true, [0x07DE] = true, [0x07DC] = true,
+ [0x07D1] = true, [0x07DA] = true, [0x07D8] = true, [0x07D6] = true,
+ [0x07D2] = true, [0x07D0] = true, [0x07CF] = true, [0x07CD] = true,
+ [0x07CB] = true, [0x07D3] = true, [0x07E4] = true, [0x07D5] = true,
+ [0x07E6] = true,
+ -- also here:
+ [tatweel]= true, [zwj] = true,
+ -- 7
+ [0x08A1] = true, [0x08AF] = true, [0x08B0] = true,
+}
+
+local arab_warned = { }
+
+-- todo: gref
+
+local function warning(current,what)
+ local char = getchar(current)
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char] = true
+ end
+end
+
+-- potential optimization: local medial_final = table.merged(medial,final)
+
+local function finish(first,last)
+ if last then
+ if first == last then
+ local fc = getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ else
+ local lc = getchar(last)
+ if medial[lc] or final[lc] then
+ -- if laststate == 1 or laststate == 2 or laststate == 4 then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ end
+ first, last = nil, nil
+ elseif first then
+ -- first and last are either both set or both nil, so we never come here
+ local fc = getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first = nil
+ end
+ return first, last
+end
+
+function methods.arab(head,font,attr)
+ local useunicodemarks = analyzers.useunicodemarks
+ local tfmdata = fontdata[font]
+ local marks = tfmdata.resources.marks
+ local first, last, current, done = nil, nil, head, false
+ current = tonut(current)
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then
+ done = true
+ local char = getchar(current)
+ if marks[char] or (useunicodemarks and categories[char] == "mn") then
+ setprop(current,a_state,s_mark)
+ elseif isolated[char] then -- can be zwj or zwnj too
+ first, last = finish(first,last)
+ setprop(current,a_state,s_isol)
+ first, last = nil, nil
+ elseif not first then
+ if medial[char] then
+ setprop(current,a_state,s_init)
+ first, last = first or current, current
+ elseif final[char] then
+ setprop(current,a_state,s_isol)
+ first, last = nil, nil
+ else -- no arab
+ first, last = finish(first,last)
+ end
+ elseif medial[char] then
+ first, last = first or current, current
+ setprop(current,a_state,s_medi)
+ elseif final[char] then
+ if getprop(last,a_state) ~= s_init then
+ -- tricky, we need to check what last may be !
+ setprop(last,a_state,s_medi)
+ end
+ setprop(current,a_state,s_fina)
+ first, last = nil, nil
+ elseif char >= 0x0600 and char <= 0x06FF then -- needs checking
+ setprop(current,a_state,s_rest)
+ first, last = finish(first,last)
+ else -- no
+ first, last = finish(first,last)
+ end
+ else
+ if first or last then
+ first, last = finish(first,last)
+ end
+ if id == math_code then
+ current = end_of_math(current)
+ end
+ end
+ current = getnext(current)
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head, done
+end
+
+methods.syrc = methods.arab
+methods.mand = methods.arab
+methods.nko = methods.arab
+
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks = v
+end)
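
The analyzer's only job is to stamp each glyph with a joining state (the codes in analyzers.states); the shaping pass later uses that stamp to pick the init/medi/fina/isol alternates. As a concrete sketch, for the logical-order sequence U+0628, U+064A, U+062A (a three-letter Arabic word) methods.arab ends up assigning:

    local states = fonts.analyzers.states
    local expected = {
        { 0x0628, states.init },   -- BEH: first joining letter, opens the cluster
        { 0x064A, states.medi },   -- YEH: joins on both sides
        { 0x062A, states.fina },   -- TEH: last letter; finish() turns its medi stamp into fina
    }
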
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-otn.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-otn.lua
index c57be5f0278..7fafadbc4fd 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-otn.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-otn.lua
@@ -6,8 +6,21 @@ if not modules then modules = { } end modules ['font-otn'] = {
license = "see context related readme files",
}
+-- this is a context version which can contain experimental code, but when we
+-- have serious patches we also need to change the other two font-otn files
+
+-- at some point I might decide to convert the whole list into a table and then
+-- run over that instead (but it has some drawbacks, as we also need to deal with
+-- attributes and such, so we need to keep track of a lot - which is why I rejected
+-- that method - although it has become a bit easier in the meantime, so it might
+-- become an alternative (by that time I will probably have gone completely lua) .. the
+-- usual chicken-and-egg issues ... maybe mkix, as it's no real tex any more then
+
-- preprocessors = { "nodes" }
+-- anchor class : mark, mkmk, curs, mklg (todo)
+-- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo)
+
-- this is still somewhat preliminary and it will get better in due time;
-- much functionality could only be implemented thanks to the husayni font
-- of Idris Samawi Hamid to who we dedicate this module.
@@ -20,7 +33,6 @@ if not modules then modules = { } end modules ['font-otn'] = {
-- todo:
--
--- kerning is probably not yet ok for latin around dics nodes (interesting challenge)
-- extension infrastructure (for usage out of context)
-- sorting features according to vendors/renderers
-- alternative loop quitters
@@ -32,7 +44,18 @@ if not modules then modules = { } end modules ['font-otn'] = {
-- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests)
-- remove some optimizations (when I have a faster machine)
--
--- maybe redo the lot some way (more context specific)
+-- beware:
+--
+-- we do some disc juggling where we need to keep in mind that the
+-- pre, post and replace fields can have prev pointers to a nesting
+-- node ... I wonder if that is still needed
+--
+-- not possible:
+--
+-- \discretionary {alpha-} {betagammadelta}
+-- {\discretionary {alphabeta-} {gammadelta}
+-- {\discretionary {alphabetagamma-} {delta}
+-- {alphabetagammadelta}}}
--[[ldx--
<p>This module is a bit more split up that I'd like but since we also want to test
@@ -57,9 +80,12 @@ is currently acceptable. Not all functions are implemented yet, often because I
lack the fonts for testing. Many scripts are not yet supported either, but I will
look into them as soon as <l n='context'/> users ask for it.</p>
-<p>Because there are different interpretations possible, I will extend the code
-with more (configureable) variants. I can also add hooks for users so that they can
-write their own extensions.</p>
+<p>The specification leaves room for interpretation. In case of doubt, the Microsoft
+implementation is the reference as it is the most complete one. As they deal with
+lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code and
+their suggestions help improve the code. I'm aware that not all border cases can be
+taken care of, unless we accept excessive runtime, and even then the interference
+with other mechanisms (like hyphenation) is not trivial.</p>
<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
relationship with unicode at all, apart from the fact that a font might cover certain
@@ -86,12 +112,12 @@ when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
results in different tables.</p>
--ldx]]--
--- action handler chainproc chainmore comment
+-- action handler chainproc
--
--- gsub_single ok ok ok
--- gsub_multiple ok ok not implemented yet
--- gsub_alternate ok ok not implemented yet
--- gsub_ligature ok ok ok
+-- gsub_single ok ok
+-- gsub_multiple ok ok
+-- gsub_alternate ok ok
+-- gsub_ligature ok ok
-- gsub_context ok --
-- gsub_contextchain ok --
-- gsub_reversecontextchain ok --
@@ -115,7 +141,6 @@ results in different tables.</p>
-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
--
-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
-- We used to have independent hashes for lookups but as the tags are unique
-- we now use only one hash. If needed we can have multiple again but in that
@@ -123,16 +148,14 @@ results in different tables.</p>
-- Todo: make plugin feature that operates on char/glyphnode arrays
-local concat, insert, remove = table.concat, table.insert, table.remove
-local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
+local type, next, tonumber = type, next, tonumber
local random = math.random
local formatters = string.formatters
local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-local registertracker = trackers.register
+local registertracker = trackers.register
+local registerdirective = directives.register
local fonts = fonts
local otf = fonts.handlers.otf
@@ -154,12 +177,23 @@ local trace_steps = false registertracker("otf.steps", function(v
local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
+local trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
+local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
+
+local quit_on_no_replacement = true -- maybe per font
+local zwnjruns = true
+
+registerdirective("otf.zwnjruns", function(v) zwnjruns = v end)
+registerdirective("otf.chain.quitonnoreplacement",function(value) quit_on_no_replacement = value end)
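+
+-- (editorial note) these are presumably toggled like any other directive, e.g. from Lua
+-- with directives.enable("otf.chain.quitonnoreplacement") / directives.disable(...), or
+-- at the TeX end with \enabledirectives[otf.zwnjruns]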
+
local report_direct = logs.reporter("fonts","otf direct")
local report_subchain = logs.reporter("fonts","otf subchain")
local report_chain = logs.reporter("fonts","otf chain")
local report_process = logs.reporter("fonts","otf process")
local report_prepare = logs.reporter("fonts","otf prepare")
local report_warning = logs.reporter("fonts","otf warning")
+local report_run = logs.reporter("fonts","otf run")
registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
@@ -171,12 +205,35 @@ registertracker("otf.injections","nodes.injections")
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local flush_node_list = node.flush_list
-local end_of_math = node.end_of_math
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_node_tail = nuts.tail
+local flush_node_list = nuts.flush_list
+local free_node = nuts.free
+local end_of_math = nuts.end_of_math
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
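+
+-- the module now accesses nodes through the "nuts" (direct node) interface, which is why
+-- field access below goes through getfield/setfield and friends instead of plain indexing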
local setmetatableindex = table.setmetatableindex
@@ -193,43 +250,35 @@ local disccodes = nodes.disccodes
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
local disc_code = nodecodes.disc
-local whatsit_code = nodecodes.whatsit
local math_code = nodecodes.math
local dir_code = whatcodes.dir
local localpar_code = whatcodes.localpar
-
local discretionary_code = disccodes.discretionary
-
local ligature_code = glyphcodes.ligature
local privateattribute = attributes.private
-- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is bases in KE's patches but there is something fishy
+-- end and one here ... this is based on KE's patches but there is something fishy
-- there as I'm pretty sure that for husayni we need some connection (as it's much
-- more complex than an average font) but I need proper examples of all cases, not
-- of only some.
local a_state = privateattribute('state')
-local a_markbase = privateattribute('markbase')
-local a_markmark = privateattribute('markmark')
-local a_markdone = privateattribute('markdone') -- assigned at the injection end
-local a_cursbase = privateattribute('cursbase')
-local a_curscurs = privateattribute('curscurs')
-local a_cursdone = privateattribute('cursdone')
-local a_kernpair = privateattribute('kernpair')
-local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
+local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
local injections = nodes.injections
local setmark = injections.setmark
local setcursive = injections.setcursive
local setkern = injections.setkern
local setpair = injections.setpair
+local resetinjection = injections.reset
+local copyinjection = injections.copy
+local setligaindex = injections.setligaindex
+local getligaindex = injections.getligaindex
-local markonce = true
local cursonce = true
-local kernonce = true
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -252,11 +301,19 @@ local currentfont = false
local lookuptable = false
local anchorlookups = false
local lookuptypes = false
+local lookuptags = false
local handlers = { }
local rlmode = 0
local featurevalue = false
--- head is always a whatsit so we can safely assume that head is not changed
+local sweephead = { }
+local sweepnode = nil
+local sweepprev = nil
+local sweepnext = nil
+
+local notmatchpre = { }
+local notmatchpost = { }
+local notmatchreplace = { }
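+
+-- state shared with the disc-aware (sweep) runs and the contextual matching below; see
+-- the longer comment near the chain handlers for what the sweep* names refer to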
-- we use this for special testing and documentation
@@ -306,20 +363,20 @@ end
local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
elseif chainname then
- return formatters["feature %a, chain %a"](kind,chainname)
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
else
return formatters["feature %a"](kind)
end
end
local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
end
-- We can assume that languages that use marks are not hyphenated. We can also assume
@@ -332,42 +389,107 @@ end
-- and indices.
local function copy_glyph(g) -- next and prev are untouched !
- local components = g.components
+ local components = getfield(g,"components")
if components then
- g.components = nil
+ setfield(g,"components",nil)
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ setfield(g,"components",components)
+ return n
+ else
local n = copy_node(g)
- g.components = components
+ copyinjection(n,g) -- we need to preserve the lig indices
return n
+ end
+end
+
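+-- flattendisk gets rid of a discretionary node: its replace text (if any) is spliced
+-- into the main list in its place and the disc itself is freed; it returns the (possibly
+-- new) head plus the node at which processing continues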
+local function flattendisk(head,disc)
+ local replace = getfield(disc,"replace")
+ setfield(disc,"replace",nil)
+ free_node(disc)
+ if head == disc then
+ local next = getnext(disc)
+ if replace then
+ if next then
+ local tail = find_node_tail(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ return replace, replace
+ elseif next then
+ return next, next
+ else
+ return -- maybe warning
+ end
else
- return copy_node(g)
+ local next = getnext(disc)
+ local prev = getprev(disc)
+ if replace then
+ local tail = find_node_tail(replace)
+ if next then
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ return head, replace
+ else
+ if next then
+ setfield(next,"prev",prev)
+ end
+ setfield(prev,"next",next)
+ return head, next
+ end
+ end
+end
+
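+-- appenddisc appends the given list to the post field of a disc node and a copy of it
+-- to the replace field, creating either field when it is still empty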
+local function appenddisc(disc,list)
+ local post = getfield(disc,"post")
+ local replace = getfield(disc,"replace")
+ local phead = list
+ local rhead = copy_node_list(list)
+ local ptail = find_node_tail(post)
+ local rtail = find_node_tail(replace)
+ if post then
+ setfield(ptail,"next",phead)
+ setfield(phead,"prev",ptail)
+ else
+ setfield(disc,"post",phead)
+ end
+ if replace then
+ setfield(rtail,"next",rhead)
+ setfield(rhead,"prev",rtail)
+ else
+ setfield(disc,"replace",rhead)
end
end
-- start is a mark and we need to keep that one
local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and start.char == char then
+ if start == stop and getchar(start) == char then
return head, start
else
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base = copy_glyph(start)
if head == start then
head = base
end
- base.char = char
- base.subtype = ligature_code
- base.components = start
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
if prev then
- prev.next = base
+ setfield(prev,"next",base)
end
if next then
- next.prev = base
+ setfield(next,"prev",base)
end
- base.next = next
- base.prev = prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
return head, base
end
end
@@ -379,50 +501,64 @@ end
-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
-- third component.
-local function getcomponentindex(start)
- if start.id ~= glyph_code then
+local function getcomponentindex(start) -- we could store this offset in the glyph (nofcomponents)
+ if getid(start) ~= glyph_code then -- and then get rid of all components
return 0
- elseif start.subtype == ligature_code then
+ elseif getsubtype(start) == ligature_code then
local i = 0
- local components = start.components
+ local components = getfield(start,"components")
while components do
i = i + getcomponentindex(components)
- components = components.next
+ components = getnext(components)
end
return i
- elseif not marks[start.char] then
+ elseif not marks[getchar(start)] then
return 1
else
return 0
end
end
--- eventually we will do positioning in an other way (needs addional w/h/d fields)
+local a_noligature = attributes.private("noligature")
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if start == stop and start.char == char then
- start.char = char
+ if getattr(start,a_noligature) == 1 then
+ -- so we can do: e\noligature{ff}e e\noligature{f}fie (we only look at the first)
+ return head, start
+ end
+ if start == stop and getchar(start) == char then
+ resetinjection(start)
+ setfield(start,"char",char)
return head, start
end
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
+ -- needs testing (side effects):
+ local components = getfield(start,"components")
+ if components then
+ -- we get a double free .. needs checking
+ -- flush_node_list(components)
+ end
+ --
+ local prev = getprev(start)
+ local next = getnext(stop)
+ local comp = start
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base = copy_glyph(start)
if start == head then
head = base
end
- base.char = char
- base.subtype = ligature_code
- base.components = start -- start can have components
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",comp) -- start can have components ... do we need to flush?
if prev then
- prev.next = base
+ setfield(prev,"next",base)
end
if next then
- next.prev = base
+ setfield(next,"prev",base)
end
- base.next = next
- base.prev = prev
+ setfield(base,"prev",prev)
+ setfield(base,"next",next)
if not discfound then
local deletemarks = markflag ~= "mark"
local components = start
@@ -432,46 +568,117 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
local current = base
-- first we loop over the glyphs in start .. stop
while start do
- local char = start.char
+ local char = getchar(start)
if not marks[char] then
baseindex = baseindex + componentindex
componentindex = getcomponentindex(start)
elseif not deletemarks then -- quite fishy
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
- head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
+ local n = copy_node(start)
+ copyinjection(n,start)
+ head, current = insert_node_after(head,current,n) -- unlikely that mark has components
elseif trace_marks then
logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
end
- start = start.next
+ start = getnext(start)
end
-- we can have one accent as part of a lookup and another following
-- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
- local start = current.next
- while start and start.id == glyph_code do
- local char = start.char
+ local start = getnext(current)
+ while start and getid(start) == glyph_code do
+ local char = getchar(start)
if marks[char] then
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
else
break
end
- start = start.next
+ start = getnext(start)
+ end
+ else
+ -- discfound ... forget about marks .. probably no scripts that hyphenate and have marks
+ local discprev = getfield(discfound,"prev")
+ local discnext = getfield(discfound,"next")
+ if discprev and discnext then
+ -- we assume normalization in context, and don't care about generic ... especially
+ -- \- can give problems as there we can have a negative char but that won't match
+ -- anyway
+ local pre = getfield(discfound,"pre")
+ local post = getfield(discfound,"post")
+ local replace = getfield(discfound,"replace")
+ if not replace then -- todo: signal simple hyphen
+ local prev = getfield(base,"prev")
+ local copied = copy_node_list(comp)
+ setfield(discnext,"prev",nil) -- also blocks funny assignments
+ setfield(discprev,"next",nil) -- also blocks funny assignments
+ if pre then
+ setfield(discprev,"next",pre)
+ setfield(pre,"prev",discprev)
+ end
+ pre = comp
+ if post then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",discnext)
+ setfield(discnext,"prev",tail)
+ setfield(post,"prev",nil)
+ else
+ post = discnext
+ end
+ setfield(prev,"next",discfound)
+ setfield(discfound,"prev",prev)
+ setfield(discfound,"next",next)
+ setfield(next,"prev",discfound)
+ setfield(base,"next",nil)
+ setfield(base,"prev",nil)
+ setfield(base,"components",copied)
+ setfield(discfound,"pre",pre)
+ setfield(discfound,"post",post)
+ setfield(discfound,"replace",base)
+ setfield(discfound,"subtype",discretionary_code)
+ base = prev -- restart
+ end
end
end
return head, base
end
-function handlers.gsub_single(head,start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples = #multiple
+ if nofmultiples > 0 then
+ resetinjection(start)
+ setfield(start,"char",multiple[1])
+ if nofmultiples > 1 then
+ local sn = getnext(start)
+ for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and marks[getchar(sn)] then
+-- local sn = getnext(sn)
+-- end
+ local n = copy_node(start) -- ignore components
+ resetinjection(n)
+ setfield(n,"char",multiple[k])
+ setfield(n,"prev",start)
+ setfield(n,"next",sn)
+ if sn then
+ setfield(sn,"prev",n)
+ end
+ setfield(start,"next",n)
+ start = n
+ end
+ end
+ return head, start, true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(getchar(start)))
+ end
+ return head, start, false
end
- start.char = replacement
- return head, start, true
end
local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
@@ -497,7 +704,7 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
elseif value == 0 then
- return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
elseif value < 1 then
return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
@@ -506,36 +713,15 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
end
end
-local function multiple_glyphs(head,start,multiple,ignoremarks)
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- start.char = multiple[1]
- if nofmultiples > 1 then
- local sn = start.next
- for k=2,nofmultiples do -- todo: use insert_node
--- untested:
---
--- while ignoremarks and marks[sn.char] then
--- local sn = sn.next
--- end
- local n = copy_node(start) -- ignore components
- n.char = multiple[k]
- n.next = sn
- n.prev = start
- if sn then
- sn.prev = n
- end
- start.next = n
- start = n
- end
- end
- return head, start, true
- else
- if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
- end
- return head, start, false
+-- handlers
+
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head, start, true
end
function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
@@ -543,12 +729,13 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
end
- start.char = choice
+ resetinjection(start)
+ setfield(start,"char",choice)
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
end
end
return head, start, true
@@ -556,23 +743,23 @@ end
function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
end
return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
+ local s, stop = getnext(start), nil
+ local startchar = getchar(start)
if marks[startchar] then
while s do
- local id = s.id
- if id == glyph_code and s.font == currentfont and s.subtype<256 then
- local lg = ligature[s.char]
+ local id = getid(s)
+ if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
+ local lg = ligature[getchar(s)]
if lg then
stop = s
ligature = lg
- s = s.next
+ s = getnext(s)
else
break
end
@@ -584,32 +771,38 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local lig = ligature.ligature
if lig then
if trace_ligatures then
- local stopchar = stop.char
+ local stopchar = getchar(stop)
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
end
- return head, start, true
+ return head, start, true, false
else
-- ok, goto next lookup
end
end
else
- local skipmark = sequence.flags[1]
+ local skipmark = sequence.flags[1]
+ local discfound = false
+ local lastdisc = nil
while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
+ local id = getid(s)
+ if id == glyph_code and getsubtype(s)<256 then -- not needed
+ if getfont(s) == currentfont then -- also not needed only when mark
+ local char = getchar(s)
if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[char]
+ s = getnext(s)
+ else -- ligature is a tree
+ local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
if lg then
- stop = s
+ if not discfound and lastdisc then
+ discfound = lastdisc
+ lastdisc = nil
+ end
+ stop = s -- needed for fake so outside then
ligature = lg
- s = s.next
+ s = getnext(s)
else
break
end
@@ -618,54 +811,128 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
break
end
elseif id == disc_code then
- discfound = true
- s = s.next
+ lastdisc = s
+ s = getnext(s)
else
break
end
end
- local lig = ligature.ligature
+ local lig = ligature.ligature -- can't we get rid of this .ligature?
if lig then
if stop then
if trace_ligatures then
- local stopchar = stop.char
+ local stopchar = getchar(stop)
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
- return head, start, true
else
-- weird but happens (in some arabic font)
- start.char = lig
+ resetinjection(start)
+ setfield(start,"char",lig)
if trace_ligatures then
logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
end
- return head, start, true
end
+ return head, start, true, discfound
else
- -- weird but happens
+ -- weird but happens, pseudo ligatures ... just the components
end
end
+ return head, start, false, discfound
+end
+
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence,injection)
+ local startchar = getchar(start)
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,injection) -- ,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
return head, start, false
end
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
+ -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
+ -- todo: kerns in components of ligatures
+ local snext = getnext(start)
+ if not snext then
+ return head, start, false
+ else
+ local prev = start
+ local done = false
+ local factor = tfmdata.parameters.factor
+ local lookuptype = lookuptypes[lookupname]
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then -- probably not needed
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,injection) -- characters[startchar])
+ if trace_kerns then
+ local startchar = getchar(start)
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,injection) -- characters[nextchar])
+ if trace_kerns then
+ local startchar = getchar(start)
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else -- wrong ... position has different entries
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ -- local a, b = krn[2], krn[6]
+ -- if a and a ~= 0 then
+ -- local k = setkern(snext,factor,rlmode,a)
+ -- if trace_kerns then
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ -- end
+ -- end
+ -- if b and b ~= 0 then
+ -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
+ -- end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn,injection)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) -- prev?
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+end
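+
+-- a short note on the data gpos_pair sees: kerns[nextchar] is either a plain number,
+-- which becomes a simple kern via setkern, or (for "pair" lookups) a table whose second
+-- and third entries hold the positioning specs for the first and second glyph of the
+-- pair; each non-empty spec is passed on to setpair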
+
--[[ldx--
<p>We get hits on a mark, but we're not sure if it has to be applied so
we need to explicitly test for basechar, baselig and basemark entries.</p>
--ldx]]--
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -689,7 +956,7 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -717,16 +984,16 @@ end
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
-- check chainpos variant
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -738,7 +1005,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index = start[a_ligacomp]
+ local index = getligaindex(start)
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -752,7 +1019,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -785,22 +1052,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = base[a_ligacomp]
+ local blc = getligaindex(base)
if blc and blc ~= slc then
- base = base.prev
+ base = getprev(base)
else
break
end
end
end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -812,7 +1079,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -840,21 +1107,21 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and start[a_cursbase]
+ local alreadydone = cursonce and getprop(start,a_cursbase)
if not alreadydone then
local done = false
- local startchar = start.char
+ local startchar = getchar(start)
if marks[startchar] then
if trace_cursive then
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = nxt.next
+ nxt = getnext(nxt)
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -889,91 +1156,17 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head, start, false
end
end
-function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return head, start, false
-end
-
-function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = start.next
- if not snext then
- return head, start, false
- else
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
-end
-
--[[ldx--
<p>I will implement multiple chain replacements once I run into a font that uses
them. It's not that complex to handle.</p>
--ldx]]--
-local chainmores = { }
local chainprocs = { }
local function logprocess(...)
@@ -1002,23 +1195,19 @@ function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,looku
return head, start, false
end
-function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
- logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
-end
-
-- The reversesub is a special case, which is why we need to store the replacements
-- in a bit weird way. There is no lookup and the replacement comes from the lookup
-- itself. It is meant mostly for dealing with Urdu.
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
+ local char = getchar(start)
local replacement = replacements[char]
if replacement then
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
- start.char = replacement
+ resetinjection(start)
+ setfield(start,"char",replacement)
return head, start, true
else
return head, start, false
@@ -1047,9 +1236,9 @@ as less as needed but that would also make the code even more messy.</p>
-- -- done
-- elseif ignoremarks then
-- repeat -- start x x m x x stop => start m
--- local next = start.next
--- if not marks[next.char] then
--- local components = next.components
+-- local next = getnext(start)
+-- if not marks[getchar(next)] then
+-- local components = getfield(next,"components")
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1059,8 +1248,8 @@ as less as needed but that would also make the code even more messy.</p>
-- until next == stop
-- else -- start x x x stop => start
-- repeat
--- local next = start.next
--- local components = next.components
+-- local next = getnext(start)
+-- local components = getfield(next,"components")
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1072,8 +1261,7 @@ as less as needed but that would also make the code even more messy.</p>
-- end
--[[ldx--
-<p>Here we replace start by a single variant, First we delete the rest of the
-match.</p>
+<p>Here we replace start by a single variant.</p>
--ldx]]--
function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
@@ -1081,11 +1269,11 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
local current = start
local subtables = currentlookup.subtables
if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ logwarning("todo: check if we need to loop over the replacements: % t",subtables)
end
while current do
- if current.id == glyph_code then
- local currentchar = current.char
+ if getid(current) == glyph_code then
+ local currentchar = getchar(current)
local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
@@ -1102,29 +1290,27 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
- current.char = replacement
+ resetinjection(current)
+ setfield(current,"char",replacement)
end
end
return head, start, true
elseif current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
return head, start, false
end
-chainmores.gsub_single = chainprocs.gsub_single
-
--[[ldx--
-<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.</p>
+<p>Here we replace start by a sequence of new glyphs.</p>
--ldx]]--
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
-- local head, n = delete_till_stop(head,start,stop)
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local replacements = lookuphash[lookupname]
@@ -1148,8 +1334,6 @@ function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,
return head, start, false
end
-chainmores.gsub_multiple = chainprocs.gsub_multiple
-
--[[ldx--
<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
--ldx]]--
@@ -1167,8 +1351,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
local subtables = currentlookup.subtables
local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
+ if getid(current) == glyph_code then -- is this check needed?
+ local currentchar = getchar(current)
local lookupname = subtables[1]
local alternatives = lookuphash[lookupname]
if not alternatives then
@@ -1183,7 +1367,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
- start.char = choice
+ resetinjection(start)
+ setfield(start,"char",choice)
else
if trace_alternatives then
logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
@@ -1197,14 +1382,12 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
elseif current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
return head, start, false
end
-chainmores.gsub_alternate = chainprocs.gsub_alternate
-
--[[ldx--
<p>When we replace ligatures we use a helper that handles the marks. I might change
this function (move code inline and handle the marks by a separate function). We
@@ -1212,7 +1395,7 @@ assume rather stupid ligatures (no complex disc nodes).</p>
--ldx]]--
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local ligatures = lookuphash[lookupname]
@@ -1227,20 +1410,26 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
end
else
- local s = start.next
+ local s = getnext(start)
local discfound = false
local last = stop
- local nofreplacements = 0
+ local nofreplacements = 1
local skipmark = currentlookup.flags[1]
while s do
- local id = s.id
+ local id = getid(s)
if id == disc_code then
- s = s.next
- discfound = true
+ if not discfound then
+ discfound = s
+ end
+ if s == stop then
+ break -- okay? or before the disc
+ else
+ s = getnext(s)
+ end
else
- local schar = s.char
+ local schar = getchar(s)
if skipmark and marks[schar] then -- marks
- s = s.next
+ s = getnext(s)
else
local lg = ligatures[schar]
if lg then
@@ -1248,7 +1437,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if s == stop then
break
else
- s = s.next
+ s = getnext(s)
end
else
break
@@ -1265,27 +1454,113 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
end
end
head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
- return head, start, true, nofreplacements
+ return head, start, true, nofreplacements, discfound
elseif trace_bugs then
if start == stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
end
end
end
end
- return head, start, false, 0
+ return head, start, false, 0, false
end
-chainmores.gsub_ligature = chainprocs.gsub_ligature
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ -- untested .. needs checking for the new model
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar] -- needed ?
+ if kerns then
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns) -- ,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext = getnext(start)
+ if snext then
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar]
+ if kerns then
+ local lookuptype = lookuptypes[lookupname]
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a) -- ,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b) -- ,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ -- local a, b = krn[2], krn[6]
+ -- if a and a ~= 0 then
+ -- local k = setkern(snext,factor,rlmode,a)
+ -- if trace_kerns then
+ -- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ -- end
+ -- end
+ -- if b and b ~= 0 then
+ -- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ -- end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+ end
+ end
+ return head, start, false
+end
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1294,14 +1569,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
markanchors = markanchors[markchar]
end
if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -1322,7 +1597,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -1349,7 +1624,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1358,14 +1633,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
markanchors = markanchors[markchar]
end
if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -1378,7 +1653,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
-- todo: like marks a ligatures hash
- local index = start[a_ligacomp]
+ local index = getligaindex(start)
local baseanchors = descriptions[basechar].anchors
if baseanchors then
local baseanchors = baseanchors['baselig']
@@ -1390,7 +1665,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -1418,64 +1693,59 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- -- local alreadydone = markonce and start[a_markmark]
- -- if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
end
end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
+ return head, start, true
end
end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
- -- elseif trace_marks and trace_details then
- -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
- -- end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
@@ -1483,9 +1753,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and start[a_cursbase]
+ local alreadydone = cursonce and getprop(start,a_cursbase)
if not alreadydone then
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local exitanchors = lookuphash[lookupname]
@@ -1499,12 +1769,12 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = nxt.next
+ nxt = getnext(nxt)
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -1539,7 +1809,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head, start, false
end
@@ -1547,127 +1817,346 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head, start, false
end
-function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+-- what pointer to return, spec says stop
+-- to be discussed ... is bidi changer a space?
+-- elseif char == zwnj and sequence[n][32] then -- brrr
+
+-- somehow l or f is global
+-- we don't need to pass the currentcontext, saves a bit
+-- make a slow variant then can be activated but with more tracing
+
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+
+-- A previous version had disc collapsing code in the (single sub) handler plus some
+-- checking in the main loop, but that left the pre/post sequences undone. The best
+-- solution is to add some checking there and backtrack when a replace/post matches
+-- but it takes a bit of work to figure out an efficient way (this is what the sweep*
+-- names refer to). I might look into that variant one day again as it can replace
+-- some other code too. In that approach we can have a special version for gsub and gpos
+-- which gains some speed. This method does the test and passes info to the handlers
+-- (sweepnode, sweepmode, sweepprev, sweepnext, etc). Here collapsing is handled in the
+-- main loop which also makes code elsewhere simpler (i.e. no need for the other special
+-- runners and disc code in ligature building). I also experimented with pushing preceding
+-- glyph sequences in the replace/pre fields beforehand, which saves checking afterwards
+-- at the cost of duplicate glyphs (memory), but it's too much overhead (runtime).
+--
+-- In the meantime Kai had moved the code from the single chain into a more general handler
+-- and this one (renamed to chaindisk) is used now. I optimized the code a bit and brought
+-- it in sync with the other code. Hopefully I didn't introduce errors. Note: this somewhat
+-- complex approach is meant for fonts that implement (for instance) ligatures by character
+-- replacement, which to some extent is not that suitable for hyphenation. I also use some
+-- helpers. This method passes some states but reparses the list. There is room for a bit of
+-- speed up but that will be done in the context version. (In fact a partial rewrite of all
+-- code can bring some more efficiency.)
+--
+-- I didn't test it with extremes, but successive disc nodes can still give issues; handling
+-- that needs more complex code which also slows down even more. The main loop variant could
+-- deal with that: test, collapse, backtrack.
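+--
+-- As a rough reference (the field names are the standard LuaTeX ones, the example
+-- word is just made up), this is the discretionary layout that the code below juggles:
+--
+--   local d   = node.new("disc")
+--   d.pre     -- what gets typeset before a break        : "of-"
+--   d.post    -- what gets typeset after a break         : "fice"
+--   d.replace -- what gets typeset when there is no break : "office"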
+
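+-- In short: chaindisk re-walks the matched range, also counting glyphs inside the
+-- replace field of a discretionary; when a disc is involved it duplicates the matched
+-- glyph run into the disc (pre/replace for a lookahead disc, post/replace for a
+-- backtrack disc) and runs chainproc on each copy, skipping the copies that the
+-- notmatchpre/notmatchpost/notmatchreplace bookkeeping flagged as not matching.
+--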
+local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,chainindex,sequence,chainproc)
+
+ if not start then
+ return head, start, false
+ end
+
+ local startishead = start == head
+ local seq = ck[3]
+ local f = ck[4]
+ local l = ck[5]
+ local s = #seq
+ local done = false
+ local sweepnode = sweepnode
+ local sweeptype = sweeptype
+ local sweepoverflow = false
+ local checkdisc = getprev(head) -- hm bad name head
+ local keepdisc = not sweepnode
+ local lookaheaddisc = nil
+ local backtrackdisc = nil
+ local current = start
+ local last = start
+ local prev = getprev(start)
+
+ -- fishy: so we can overflow and then go on in the sweep?
+
+ local i = f
+ while i <= l do
+ local id = getid(current)
+ if id == glyph_code then
+ i = i + 1
+ last = current
+ current = getnext(current)
+ elseif id == disc_code then
+ if keepdisc then
+ keepdisc = false
+ if notmatchpre[current] ~= notmatchreplace[current] then
+ lookaheaddisc = current
+ end
+ local replace = getfield(current,"replace")
+ while replace and i <= l do
+ if getid(replace) == glyph_code then
+ i = i + 1
+ end
+ replace = getnext(replace)
+ end
+ last = current
+ current = getnext(current)
+ else
+ head, current = flattendisk(head,current)
+ end
+ else
+ last = current
+ current = getnext(current)
+ end
+ if current then
+ -- go on
+ elseif sweepoverflow then
+ -- we are already following up on sweepnode
+ break
+ elseif sweeptype == "post" or sweeptype == "replace" then
+ current = getnext(sweepnode)
+ if current then
+ sweeptype = nil
+ sweepoverflow = true
+ else
+ break
end
end
end
- return head, start, false
-end
-chainmores.gpos_single = chainprocs.gpos_single -- okay?
+ if sweepoverflow then
+ local prev = current and getprev(current)
+ if not current or prev ~= sweepnode then
+ local head = getnext(sweepnode)
+ local tail = nil
+ if prev then
+ tail = prev
+ setfield(current,"prev",sweepnode)
+ else
+ tail = find_node_tail(head)
+ end
+ setfield(sweepnode,"next",current)
+ setfield(head,"prev",nil)
+ setfield(tail,"next",nil)
+ appenddisc(sweepnode,head)
+ end
+ end
--- when machines become faster i will make a shared function
+ if l < s then
+ local i = l
+ local t = sweeptype == "post" or sweeptype == "replace"
+ while current and i < s do
+ local id = getid(current)
+ if id == glyph_code then
+ i = i + 1
+ current = getnext(current)
+ elseif id == disc_code then
+ if keepdisc then
+ keepdisc = false
+ if notmatchpre[current] ~= notmatchreplace[current] then
+ lookaheaddisc = current
+ end
+ local replace = getfield(current,"replace")
+ while replace and i < s do
+ if getid(replace) == glyph_code then
+ i = i + 1
+ end
+ replace = getnext(replace)
+ end
+ current = getnext(current)
+ elseif notmatchpre[current] ~= notmatchreplace[current] then
+ head, current = flattendisk(head,current)
+ else
+ current = getnext(current) -- HH
+ end
+ else
+ current = getnext(current)
+ end
+ if not current and t then
+ current = getnext(sweepnode)
+ if current then
+ sweeptype = nil
+ end
+ end
+ end
+ end
-function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = start.next
- if snext then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- local a, b = krn[2], krn[6]
- if a and a ~= 0 then
- local k = setkern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
+ if f > 1 then
+ local current = prev
+ local i = f
+ local t = sweeptype == "pre" or sweeptype == "replace"
+ if not current and t and current == checkdisc then
+ current = getprev(sweepnode)
+ end
+ while current and i > 1 do -- missing getprev added / moved outside
+ local id = getid(current)
+ if id == glyph_code then
+ i = i - 1
+ elseif id == disc_code then
+ if keepdisc then
+ keepdisc = false
+ if notmatchpost[current] ~= notmatchreplace[current] then
+ backtrackdisc = current
+ end
+ local replace = getfield(current,"replace")
+ while replace and i > 1 do
+ if getid(replace) == glyph_code then
+ i = i - 1
end
- break
+ replace = getnext(replace)
end
+ elseif notmatchpost[current] ~= notmatchreplace[current] then
+ head, current = flattendisk(head,current)
end
- return head, start, done
+ end
+ current = getprev(current)
+ if t and current == checkdisc then
+ current = getprev(sweepnode)
end
end
end
- return head, start, false
-end
-chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
+ local ok = false
+ if lookaheaddisc then
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
+ local cf = start
+ local cl = getprev(lookaheaddisc)
+ local cprev = getprev(start)
+ local insertedmarks = 0
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant then can be activated but with more tracing
+ while cprev and getid(cf) == glyph_code and getfont(cf) == currentfont and getsubtype(cf) < 256 and marks[getchar(cf)] do
+ insertedmarks = insertedmarks + 1
+ cf = cprev
+ startishead = cf == head
+ cprev = getprev(cprev)
+ end
+
+ setfield(lookaheaddisc,"prev",cprev)
+ if cprev then
+ setfield(cprev,"next",lookaheaddisc)
+ end
+ setfield(cf,"prev",nil)
+ setfield(cl,"next",nil)
+ if startishead then
+ head = lookaheaddisc
+ end
+
+ local replace = getfield(lookaheaddisc,"replace")
+ local pre = getfield(lookaheaddisc,"pre")
+ local new = copy_node_list(cf)
+ local cnew = new
+ for i=1,insertedmarks do
+ cnew = getnext(cnew)
+ end
+ local clast = cnew
+ for i=f,l do
+ clast = getnext(clast)
+ end
+ if not notmatchpre[lookaheaddisc] then
+ cf, start, ok = chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if not notmatchreplace[lookaheaddisc] then
+ new, cnew, ok = chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if pre then
+ setfield(cl,"next",pre)
+ setfield(pre,"prev",cl)
+ end
+ if replace then
+ local tail = find_node_tail(new)
+ setfield(tail,"next",replace)
+ setfield(replace,"prev",tail)
+ end
+ setfield(lookaheaddisc,"pre",cf) -- also updates tail
+ setfield(lookaheaddisc,"replace",new) -- also updates tail
+
+ start = getprev(lookaheaddisc)
+ sweephead[cf] = getnext(clast)
+ sweephead[new] = getnext(last)
+
+ elseif backtrackdisc then
+
+ local cf = getnext(backtrackdisc)
+ local cl = start
+ local cnext = getnext(start)
+ local insertedmarks = 0
+
+ while cnext and getid(cnext) == glyph_code and getfont(cnext) == currentfont and getsubtype(cnext) < 256 and marks[getchar(cnext)] do
+ insertedmarks = insertedmarks + 1
+ cl = cnext
+ cnext = getnext(cnext)
+ end
+ if cnext then
+ setfield(cnext,"prev",backtrackdisc)
+ end
+ setfield(backtrackdisc,"next",cnext)
+ setfield(cf,"prev",nil)
+ setfield(cl,"next",nil)
+ local replace = getfield(backtrackdisc,"replace")
+ local post = getfield(backtrackdisc,"post")
+ local new = copy_node_list(cf)
+ local cnew = find_node_tail(new)
+ for i=1,insertedmarks do
+ cnew = getprev(cnew)
+ end
+ local clast = cnew
+ for i=f,l do
+ clast = getnext(clast)
+ end
+ if not notmatchpost[backtrackdisc] then
+ cf, start, ok = chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if not notmatchreplace[backtrackdisc] then
+ new, cnew, ok = chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if post then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",cf)
+ setfield(cf,"prev",tail)
+ else
+ post = cf
+ end
+ if replace then
+ local tail = find_node_tail(replace)
+ setfield(tail,"next",new)
+ setfield(new,"prev",tail)
+ else
+ replace = new
+ end
+ setfield(backtrackdisc,"post",post) -- also updates tail
+ setfield(backtrackdisc,"replace",replace) -- also updates tail
+ start = getprev(backtrackdisc)
+ sweephead[post] = getnext(clast)
+ sweephead[replace] = getnext(last)
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
else
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+
+ head, start, ok = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+
end
+
+ return head, start, ok
end
local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
- -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
+ local sweepnode = sweepnode
+ local sweeptype = sweeptype
+ local diskseen = false
+ local checkdisc = getprev(head)
local flags = sequence.flags
local done = false
local skipmark = flags[1]
local skipligature = flags[2]
local skipbase = flags[3]
- local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
- local markclass = sequence.markclass -- todo, first we need a proper test
+ local markclass = sequence.markclass
local skipped = false
- for k=1,#contexts do
+
+ for k=1,#contexts do -- i've only seen ccmp having > 1 (e.g. dejavu)
local match = true
local current = start
local last = start
@@ -1677,11 +2166,12 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- f..l = mid string
if s == 1 then
-- never happens
- match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
+ match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
else
-- maybe we need a better space check (maybe check for glue or category or combination)
-- we cannot optimize for n=2 because there can be disc nodes
- local f, l = ck[4], ck[5]
+ local f = ck[4]
+ local l = ck[5]
-- current match
if f == 1 and f == l then -- current only
-- already a hit
@@ -1691,42 +2181,106 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if f == l then -- new, else last out of sync (f is > 1)
-- match = true
else
+ local discfound = nil
local n = f + 1
- last = last.next
+ last = getnext(last)
while n <= l do
+ if not last and (sweeptype == "post" or sweeptype == "replace") then
+ last = getnext(sweepnode)
+ sweeptype = nil
+ end
if last then
- local id = last.id
+ local id = getid(last)
if id == glyph_code then
- if last.font == currentfont and last.subtype<256 then
- local char = last.char
+ if getfont(last) == currentfont and getsubtype(last)<256 then
+ local char = getchar(last)
local ccd = descriptions[char]
if ccd then
- local class = ccd.class
+ local class = ccd.class or "base"
if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
skipped = true
if trace_skips then
show_skip(kind,chainname,char,ck,class)
end
- last = last.next
+ last = getnext(last)
elseif seq[n][char] then
if n < l then
- last = last.next
+ last = getnext(last)
end
n = n + 1
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
break
end
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
break
end
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
break
end
elseif id == disc_code then
- last = last.next
+ diskseen = true
+ discfound = last
+ notmatchpre[last] = nil
+ notmatchpost[last] = true
+ notmatchreplace[last] = nil
+ local pre = getfield(last,"pre")
+ local replace = getfield(last,"replace")
+ if pre then
+ local n = n
+ while pre do
+ if seq[n][getchar(pre)] then
+ n = n + 1
+ pre = getnext(pre)
+ if n > l then
+ break
+ end
+ else
+ notmatchpre[last] = true
+ break
+ end
+ end
+ if n <= l then
+ notmatchpre[last] = true
+ end
+ else
+ notmatchpre[last] = true
+ end
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ replace = getnext(replace)
+ if n > l then
+ break
+ end
+ else
+ notmatchreplace[last] = true
+ match = not notmatchpre[last]
+ break
+ end
+ end
+ match = not notmatchpre[last]
+ end
+ last = getnext(last)
else
match = false
break
@@ -1740,76 +2294,163 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- before
if match and f > 1 then
- local prev = start.prev
+ local prev = getprev(start)
if prev then
- local n = f-1
- while n >= 1 do
- if prev then
- local id = prev.id
- if id == glyph_code then
- if prev.font == currentfont and prev.subtype<256 then -- normal char
- local char = prev.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
+ if prev == checkdisc and (sweeptype == "pre" or sweeptype == "replace") then
+ prev = getprev(sweepnode)
+ -- sweeptype = nil
+ end
+ if prev then
+ local discfound = nil
+ local n = f - 1
+ while n >= 1 do
+ if prev then
+ local id = getid(prev)
+ if id == glyph_code then
+ if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
+ local char = getchar(prev)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n -1
+ else
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpost[discfound]
+ else
+ match = false
+ end
+ break
end
- elseif seq[n][char] then
- n = n -1
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpost[discfound]
+ else
+ match = false
+ end
break
end
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpost[discfound]
+ else
+ match = false
+ end
break
end
+ elseif id == disc_code then
+ -- the special case: f i where i becomes dotless i ..
+ diskseen = true
+ discfound = prev
+ notmatchpre[prev] = true
+ notmatchpost[prev] = nil
+ notmatchreplace[prev] = nil
+ local pre = getfield(prev,"pre")
+ local post = getfield(prev,"post")
+ local replace = getfield(prev,"replace")
+ if pre ~= start and post ~= start and replace ~= start then
+ if post then
+ local n = n
+ local posttail = find_node_tail(post)
+ while posttail do
+ if seq[n][getchar(posttail)] then
+ n = n - 1
+ if posttail == post then
+ break
+ else
+ posttail = getprev(posttail)
+ if n < 1 then
+ break
+ end
+ end
+ else
+ notmatchpost[prev] = true
+ break
+ end
+ end
+ if n >= 1 then
+ notmatchpost[prev] = true
+ end
+ else
+ notmatchpost[prev] = true
+ end
+ if replace then
+ -- we seldom enter this branch (e.g. on brill efficient)
+ local replacetail = find_node_tail(replace)
+ while replacetail do
+ if seq[n][getchar(replacetail)] then
+ n = n - 1
+ if replacetail == replace then
+ break
+ else
+ replacetail = getprev(replacetail)
+ if n < 1 then
+ break
+ end
+ end
+ else
+ notmatchreplace[prev] = true
+ match = not notmatchpost[prev]
+ break
+ end
+ end
+ if not match then
+ break
+ end
+ else
+ -- skip 'm
+ end
+ else
+ -- skip 'm
+ end
+ elseif seq[n][32] then
+ n = n -1
else
match = false
break
end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then
- n = n -1
+ prev = getprev(prev)
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
+ n = n - 1
else
match = false
break
end
- prev = prev.prev
- elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n -1
- else
- match = false
- break
end
+ else
+ match = false
end
- elseif f == 2 then
- match = seq[1][32]
else
- for n=f-1,1 do
- if not seq[n][32] then
- match = false
- break
- end
- end
+ match = false
end
end
-- after
if match and s > l then
- local current = last and last.next
+ local current = last and getnext(last)
+ if not current then
+ if sweeptype == "post" or sweeptype == "replace" then
+ current = getnext(sweepnode)
+ -- sweeptype = nil
+ end
+ end
if current then
+ local discfound = nil
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
while n <= s do
if current then
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- if current.font == currentfont and current.subtype<256 then -- normal char
- local char = current.char
+ if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
+ local char = getchar(current)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1821,26 +2462,88 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
elseif seq[n][char] then
n = n + 1
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
break
end
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
break
end
else
- match = false
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
break
end
elseif id == disc_code then
- -- skip 'm
+ diskseen = true
+ discfound = current
+ notmatchpre[current] = nil
+ notmatchpost[current] = true
+ notmatchreplace[current] = nil
+ local pre = getfield(current,"pre")
+ local replace = getfield(current,"replace")
+ if pre then
+ local n = n
+ while pre do
+ if seq[n][getchar(pre)] then
+ n = n + 1
+ pre = getnext(pre)
+ if n > s then
+ break
+ end
+ else
+ notmatchpre[current] = true
+ break
+ end
+ end
+ if n <= s then
+ notmatchpre[current] = true
+ end
+ else
+ notmatchpre[current] = true
+ end
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ replace = getnext(replace)
+ if n > s then
+ break
+ end
+ else
+ notmatchreplace[current] = true
+ match = notmatchpre[current]
+ break
+ end
+ end
+ if not match then
+ break
+ end
+ else
+ -- skip 'm
+ end
elseif seq[n][32] then -- brrr
n = n + 1
else
match = false
break
end
- current = current.next
+ current = getnext(current)
elseif seq[n][32] then
n = n + 1
else
@@ -1848,23 +2551,17 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
end
- elseif s-l == 1 then
- match = seq[s][32]
else
- for n=l+1,s do
- if not seq[n][32] then
- match = false
- break
- end
- end
+ match = false
end
end
end
if match then
- -- ck == currentcontext
+ -- can lookups be of a different type ?
+ local diskchain = diskseen or sweepnode
if trace_contexts then
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
+ local char = getchar(start)
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -1881,10 +2578,14 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
local chainlookupname = chainlookups[1]
local chainlookup = lookuptable[chainlookupname]
if chainlookup then
- local cp = chainprocs[chainlookup.type]
- if cp then
+ local chainproc = chainprocs[chainlookup.type]
+ if chainproc then
local ok
- head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if diskchain then
+ head, start, ok = chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
+ else
+ head, start, ok = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
if ok then
done = true
end
@@ -1896,15 +2597,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
else
local i = 1
- repeat
+ while start and true do
if skipped then
- while true do
- local char = start.char
+ while true do -- todo: use properties
+ local char = getchar(start)
local ccd = descriptions[char]
if ccd then
- local class = ccd.class
+ local class = ccd.class or "base"
if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
+ start = getnext(start)
else
break
end
@@ -1913,50 +2614,75 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
end
+ -- see remark in ms standard under : LookupType 5: Contextual Substitution Subtable
local chainlookupname = chainlookups[i]
local chainlookup = lookuptable[chainlookupname]
if not chainlookup then
- -- okay, n matches, < n replacements
+ -- we just advance
i = i + 1
else
- local cp = chainmores[chainlookup.type]
- if not cp then
+ local chainproc = chainprocs[chainlookup.type]
+ if not chainproc then
-- actually an error
logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
i = i + 1
else
local ok, n
- head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if diskchain then
+ head, start, ok = chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
+ else
+ head, start, ok, n = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ end
-- messy since last can be changed !
if ok then
done = true
- -- skip next one(s) if ligature
- i = i + (n or 1)
- else
- i = i + 1
+ if n and n > 1 then
+ -- we have a ligature (cf. the spec we advance one, but we really need to test it
+ -- as there are fonts out there that are fuzzy and have too many lookups:
+ --
+ -- U+1105 U+119E U+1105 U+119E : sourcehansansklight: script=hang ccmp=yes
+ --
+ if i + n > nofchainlookups then
+ -- if trace_contexts then
+ -- logprocess("%s: quitting lookups",cref(kind,chainname))
+ -- end
+ break
+ else
+ -- we need to carry on
+ end
+ end
end
+ i = i + 1
end
end
- if start then
- start = start.next
- else
- -- weird
+ if i > nofchainlookups or not start then
+ break
+ elseif start then
+ start = getnext(start)
end
- until i > nofchainlookups
+ end
end
else
local replacements = ck[7]
if replacements then
head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
else
- done = true -- can be meant to be skipped
+ done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
if trace_contexts then
logprocess("%s: skipping match",cref(kind,chainname))
end
end
end
+ if done then
+ break -- out of contexts (new, needs checking)
+ end
end
end
+ if diskseen then -- maybe move up so that we can turn checking on/off
+ notmatchpre = { }
+ notmatchpost = { }
+ notmatchreplace = { }
+ end
return head, start, done
end
@@ -2038,14 +2764,21 @@ local autofeatures = fonts.analyzers.features -- was: constants
local function initialize(sequence,script,language,enabled)
local features = sequence.features
if features then
- for kind, scripts in next, features do
- local valid = enabled[kind]
- if valid then
- local languages = scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local kind = order[i] --
+ local valid = enabled[kind]
+ if valid then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid, autofeatures[kind] or false, sequence, kind }
+ end
end
end
+ else
+ -- can't happen
end
end
return false
@@ -2074,49 +2807,226 @@ function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
}
rs[language] = rl
local sequences = tfmdata.resources.sequences
--- setmetatableindex(rl, function(t,k)
--- if type(k) == "number" then
--- local v = enabled and initialize(sequences[k],script,language,enabled)
--- t[k] = v
--- return v
--- end
--- end)
-for s=1,#sequences do
- local v = enabled and initialize(sequences[s],script,language,enabled)
- if v then
- rl[#rl+1] = v
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
end
+ return rl
end
+
+-- assumptions:
+--
+-- * languages that use complex disc nodes
+
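+-- A rough summary of what the four disc runners defined below do, as far as this
+-- code goes (only names defined here are referenced, nothing else is assumed):
+--
+--   kernrun : temporarily links the disc fields (pre/post/replace) to the surrounding
+--             glyphs and lets the gpos handler run over each variant
+--   comprun : runs the handler over the pre, post and replace sublists, with sweepnode
+--             and sweeptype set so that contextual lookups can look beyond the sublist
+--   testrun : checks whether replace plus the following text forms a ligature and, if
+--             so, flattens the disc and restarts there; otherwise falls back to comprun
+--   discrun : bridges the disc briefly so that a glyph right before it can still be
+--             positioned (used for the zwnj case in gpos runs)
+--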
+local function kernrun(disc,run)
+ --
+ -- we catch <font 1><disc font 2>
+ --
+ if trace_kernruns then
+ report_run("kern") -- will be more detailed
+ end
+ --
+ local prev = getprev(disc) -- todo, keep these in the main loop
+ local next = getnext(disc) -- todo, keep these in the main loop
+ --
+ local pre = getfield(disc,"pre")
+ local post = getfield(disc,"post")
+ local replace = getfield(disc,"replace")
+ --
+ local prevmarks = prev
+ --
+ -- can be optional, because why on earth do we get a disc after a mark (okay, maybe when a ccmp
+ -- has happened but then it should be in the disc so basically this test indicates an error)
+ --
+ while prevmarks and getid(prevmarks) == glyph_code and marks[getchar(prevmarks)] and getfont(prevmarks) == currentfont and getsubtype(prevmarks) < 256 do
+ prevmarks = getprev(prevmarks)
+ end
+ --
+ if prev and (pre or replace) and not (getid(prev) == glyph_code and getfont(prev) == currentfont and getsubtype(prev)<256) then
+ prev = false
+ end
+ if next and (post or replace) and not (getid(next) == glyph_code and getfont(next) == currentfont and getsubtype(next)<256) then
+ next = false
+ end
+ --
+ if not pre then
+ -- go on
+ elseif prev then
+ local nest = getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ run(prevmarks,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ run(pre,"preinjections")
+ end
+ --
+ if not post then
+ -- go on
+ elseif next then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(post,"postinjections",next)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(post,"postinjections")
+ end
+ --
+ if not replace and prev and next then
+ -- this should be already done by discfound
+ setfield(prev,"next",next)
+ setfield(next,"prev",prev)
+ run(prevmarks,"injections",next)
+ setfield(prev,"next",disc)
+ setfield(next,"prev",disc)
+ elseif prev and next then
+ local tail = find_node_tail(replace)
+ local nest = getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(prevmarks,"replaceinjections",next)
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ elseif prev then
+ local nest = getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ run(prevmarks,"replaceinjections")
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ elseif next then
+ local tail = find_node_tail(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(replace,"replaceinjections",next)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(replace,"replaceinjections")
end
- return rl
end
--- elseif id == glue_code then
--- if p[5] then -- chain
--- local pc = pp[32]
--- if pc then
--- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
--- if ok then
--- done = true
--- end
--- if start then start = start.next end
--- else
--- start = start.next
--- end
--- else
--- start = start.next
--- end
+-- the if new test might be dangerous as luatex will check / set some tail stuff
+-- in a temp node
--- there will be a new direction parser (pre-parsed etc)
+local function comprun(disc,run)
+ if trace_compruns then
+ report_run("comp: %s",languages.serializediscretionary(disc))
+ end
+ --
+ local pre = getfield(disc,"pre")
+ if pre then
+ sweepnode = disc
+ sweeptype = "pre" -- in alternative code preinjections is used (also used then for properties, saves a variable)
+ local new, done = run(pre)
+ if done then
+ setfield(disc,"pre",new)
+ end
+ end
+ --
+ local post = getfield(disc,"post")
+ if post then
+ sweepnode = disc
+ sweeptype = "post"
+ local new, done = run(post)
+ if done then
+ setfield(disc,"post",new)
+ end
+ end
+ --
+ local replace = getfield(disc,"replace")
+ if replace then
+ sweepnode = disc
+ sweeptype = "replace"
+ local new, done = run(replace)
+ if done then
+ setfield(disc,"replace",new)
+ end
+ end
+ sweepnode = nil
+ sweeptype = nil
+end
--- less bytecode: 290 -> 254
---
--- attr = attr or false
---
--- local a = getattr(start,0)
--- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then
--- -- the action
--- end
+local function testrun(disc,trun,crun) -- use helper
+ local next = getnext(disc)
+ if next then
+ local replace = getfield(disc,"replace")
+ if replace then
+ local prev = getprev(disc)
+ if prev then
+ -- only look ahead
+ local tail = find_node_tail(replace)
+ -- local nest = getprev(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ if trun(replace,next) then
+ setfield(disc,"replace",nil) -- beware, side effects of nest so first
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ setfield(next,"prev",tail)
+ setfield(tail,"next",next)
+ setfield(disc,"prev",nil)
+ setfield(disc,"next",nil)
+ flush_node_list(disc)
+ return replace -- restart
+ else
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ end
+ else
+ -- weird case
+ end
+ else
+ -- no need
+ end
+ else
+ -- weird case
+ end
+ comprun(disc,crun)
+ return next
+end
+
+local function discrun(disc,drun,krun)
+ local next = getnext(disc)
+ local prev = getprev(disc)
+ if trace_discruns then
+ report_run("disc") -- will be more detailed
+ end
+ if next and prev then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ drun(prev)
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ --
+ local pre = getfield(disc,"pre")
+ if not pre then
+ -- go on
+ elseif prev then
+ local nest = getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ krun(prev,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ krun(pre,"preinjections")
+ end
+ return next
+end
+
+-- todo: maybe run lr and rl stretches
local function featuresprocessor(head,font,attr)
@@ -2126,6 +3036,8 @@ local function featuresprocessor(head,font,attr)
return head, false
end
+ head = tonut(head)
+
if trace_steps then
checkstep(head)
end
@@ -2139,9 +3051,11 @@ local function featuresprocessor(head,font,attr)
anchorlookups = resources.lookup_to_anchor
lookuptable = resources.lookups
lookuptypes = resources.lookuptypes
+ lookuptags = resources.lookuptags
currentfont = font
rlmode = 0
+ sweephead = { }
local sequences = resources.sequences
local done = false
@@ -2157,41 +3071,49 @@ local function featuresprocessor(head,font,attr)
-- Keeping track of the headnode is needed for devanagari (I generalized it a bit
-- so that multiple cases are also covered.)
+ -- We don't go to the next node when a disc node is created, so that we can then treat
+ -- the pre, post and replace. It's a bit of a hack but works out ok for most cases.
+
+ -- there can be less subtype and attr checking in the comprun etc helpers
+
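+ -- For reference: each dataset entry comes from initialize above and is used below as
+ -- { featurevalue, analyzer attribute or false, sequence, feature kind }.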
for s=1,#datasets do
- local dataset = datasets[s]
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
-
- local sequence = dataset[5] -- sequences[s] -- also dataset[5]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
- if chain < 0 then
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+ local attribute = dataset[2]
+ local sequence = dataset[3] -- sequences[s] -- also dataset[5]
+ local kind = dataset[4]
+ ----- chain = dataset[5] -- sequence.chain or 0
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local typ = sequence.type
+ local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- maybe all of them
+ local subtables = sequence.subtables
+ local handler = handlers[typ]
+ if typ == "gsub_reversecontextchain" then -- chain < 0
-- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
-- we need to get rid of this slide! probably no longer needed in latest luatex
local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
a = a == attr
else
a = true
end
if a then
+ local char = getchar(start)
for i=1,#subtables do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[start.char]
+ local lookupmatch = lookupcache[char]
if lookupmatch then
- head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ -- todo: disc?
+ head, start, success = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
if success then
break
end
@@ -2200,213 +3122,242 @@ local function featuresprocessor(head,font,attr)
report_missing_cache(typ,lookupname)
end
end
- if start then start = start.prev end
+ if start then start = getprev(start) end
else
- start = start.prev
+ start = getprev(start)
end
else
- start = start.prev
+ start = getprev(start)
end
else
- start = start.prev
+ start = getprev(start)
end
end
else
- local handler = handlers[typ]
local ns = #subtables
local start = head -- local ?
rlmode = 0 -- to be checked ?
if ns == 1 then -- happens often
- local lookupname = subtables[1]
+ local lookupname = subtables[1]
local lookupcache = lookuphash[lookupname]
if not lookupcache then -- also check for empty cache
report_missing_cache(typ,lookupname)
else
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
- local head = start
- local done = false
+ local function c_run(head) -- no need to check for 256 and attr probably also the same
+ local done = false
+ local start = sweephead[head]
+ if start then
+ sweephead[head] = nil
+ else
+ start = head
+ end
while start do
- local id = start.id
- if id == glyph_code and start.font == font and start.subtype <256 then
- local a = start[0]
+ local id = getid(start)
+ if id ~= glyph_code then
+ -- very unlikely
+ start = getnext(start)
+ elseif getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
- local lookupmatch = lookupcache[start.char]
+ local lookupmatch = lookupcache[getchar(start)]
if lookupmatch then
-- sequence can be removed
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
if ok then
done = true
end
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ return head, false
end
end
if done then
- success = true
- return head
+ success = true -- needed in this subrun?
end
+ return head, done
end
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ local function t_run(start,stop)
+ while start ~= stop do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ end
+ start = getnext(start)
else
- a = not attribute or prev[a_state] == attribute
+ break
end
- if a then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
- -- sequence kan weg
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- success = true
+ end
+ end
+
+ local function d_run(prev) -- we can assume that prev and next are glyphs
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+ -- sequence can be removed
+ local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ end
+
+ local function k_run(sub,injection,last)
+ local a = getattr(sub,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
+ else
+ a = not attribute or getprop(sub,a_state) == attribute
+ end
+ if a then
+ -- sequence can be removed
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local id = getid(n)
+ if id == glyph_code then
+ local lookupmatch = lookupcache[getchar(n)]
+ if lookupmatch then
+ local h, d, ok = handler(sub,n,kind,lookupname,lookupmatch,sequence,lookuphash,1,injection)
+ if ok then
+ done = true
+ success = true
+ end
end
+ else
+ -- message
end
end
- prev.next = disc
- -- next.prev = disc
end
- return next
end
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
+ if getfont(start) == font and getsubtype(start) < 256 then -- why a 256 test ...
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
- local lookupmatch = lookupcache[start.char]
+ local char = getchar(start)
+ local lookupmatch = lookupcache[char]
if lookupmatch then
-- sequence can be removed
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
if ok then
success = true
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ start = getnext(start)
end
elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
- end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
- end
-elseif typ == "gpos_single" or typ == "gpos_pair" then
- kerndisc(start)
- end
- start = start.next
- elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- -- one might wonder if the par dir should be looked at, so we might as well drop the next line
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
+ if gpossing then
+ kernrun(start,k_run)
+ start = getnext(start)
+ elseif typ == "gsub_ligature" then
+ start = testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start = getnext(start)
end
- start = start.next
elseif id == math_code then
- start = end_of_math(start).next
+ start = getnext(end_of_math(start))
else
- start = start.next
+ start = getnext(start)
end
end
end
+
else
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
- local head = start
- local done = false
+ local function c_run(head)
+ local done = false
+ local start = sweephead[head]
+ if start then
+ sweephead[head] = nil
+ else
+ start = head
+ end
while start do
- local id = start.id
- if id == glyph_code and start.id == font and start.subtype <256 then
- local a = start[0]
+ local id = getid(start)
+ if id ~= glyph_code then
+ -- very unlikely
+ start = getnext(start)
+ elseif getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
+ local char = getchar(start)
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[start.char]
+ local lookupmatch = lookupcache[char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
if ok then
done = true
break
@@ -2419,158 +3370,192 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ return head, false
end
end
if done then
success = true
- return head
end
+ return head, done
end
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
- else
- a = not attribute or prev[a_state] == attribute
+ local function d_run(prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ -- brr prev can be disc
+ local char = getchar(prev)
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
+ end
+ end
+
+ local function k_run(sub,injection,last)
+ local a = getattr(sub,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
+ else
+ a = not attribute or getprop(sub,a_state) == attribute
+ end
+ if a then
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local id = getid(n)
+ if id == glyph_code then
+ local char = getchar(n)
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local h, d, ok = handler(head,n,kind,lookupname,lookupmatch,sequence,lookuphash,i,injection)
+ if ok then
+ done = true
+ break
+ end
end
+ else
+ report_missing_cache(typ,lookupname)
end
- else
- report_missing_cache(typ,lookupname)
end
+ else
+ -- message
end
end
- prev.next = disc
- -- next.prev = disc
end
- return next
+ end
+
+ local function t_run(start,stop)
+ while start ~= stop do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local char = getchar(start)
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ start = getnext(start)
+ else
+ break
+ end
+ end
end
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
for i=1,ns do
- local lookupname = subtables[i]
+ local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[start.char]
+ local char = getchar(start)
+ local lookupmatch = lookupcache[char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
if ok then
success = true
break
elseif not start then
-- don't ask why ... shouldn't happen
break
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
else
report_missing_cache(typ,lookupname)
end
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ start = getnext(start)
end
elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
- end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
- end
-elseif typ == "gpos_single" or typ == "gpos_pair" then
- kerndisc(start)
- end
- start = start.next
- elseif id == whatsit_code then
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
+ if gpossing then
+ kernrun(start,k_run)
+ start = getnext(start)
+ elseif typ == "gsub_ligature" then
+ start = testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start = getnext(start)
end
- start = start.next
elseif id == math_code then
- start = end_of_math(start).next
+ start = getnext(end_of_math(start))
else
- start = start.next
+ start = getnext(start)
end
end
end
@@ -2581,10 +3566,16 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
if trace_steps then -- ?
registerstep(head)
end
+
end
+
+ head = tonode(head)
+
return head, done
end
+-- this might move to the loader
+
local function generic(lookupdata,lookupname,unicode,lookuphash)
local target = lookuphash[lookupname]
if target then
@@ -2594,47 +3585,48 @@ local function generic(lookupdata,lookupname,unicode,lookuphash)
end
end
-local action = {
+local function ligature(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ for i=1,#lookupdata do
+ local li = lookupdata[i]
+ local tu = target[li]
+ if not tu then
+ tu = { }
+ target[li] = tu
+ end
+ target = tu
+ end
+ target.ligature = unicode
+end
+local function pair(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ local others = target[unicode]
+ local paired = lookupdata[1]
+ if others then
+ others[paired] = lookupdata
+ else
+ others = { [paired] = lookupdata }
+ target[unicode] = others
+ end
+end
+
+local action = {
substitution = generic,
multiple = generic,
alternate = generic,
position = generic,
-
- ligature = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
- end
- target = tu
- end
- target.ligature = unicode
- end,
-
- pair = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if others then
- others[paired] = lookupdata
- else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
- end,
-
+ ligature = ligature,
+ pair = pair,
+ kern = pair,
}
local function prepare_lookups(tfmdata)
@@ -2647,12 +3639,17 @@ local function prepare_lookups(tfmdata)
local lookuptypes = resources.lookuptypes
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
+ local duplicates = resources.duplicates
-- we cannot free the entries in the descriptions as sometimes we access
-- them directly (for instance anchors) ... selectively freeing doesn't save
-- much memory as it's only a reference to a table and the slot in the
-- description hash is not freed anyway
+ -- we can delay this using metatables so that we don't make the hashes for
+ -- features we don't use but then we need to loop over the characters
+ -- many times so we gain nothing
+
for unicode, character in next, characters do -- we cannot loop over descriptions !
local description = descriptions[unicode]
@@ -2662,7 +3659,7 @@ local function prepare_lookups(tfmdata)
local lookups = description.slookups
if lookups then
for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash,duplicates)
end
end
@@ -2672,7 +3669,7 @@ local function prepare_lookups(tfmdata)
local lookuptype = lookuptypes[lookupname]
for l=1,#lookuplist do
local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash,duplicates)
end
end
end
@@ -2696,7 +3693,7 @@ local function prepare_lookups(tfmdata)
for name, anchor in next, anchors do
local lookups = anchor_to_lookup[name]
if lookups then
- for lookup, _ in next, lookups do
+ for lookup in next, lookups do
local target = lookuphash[lookup]
if target then
target[unicode] = anchors
@@ -2716,6 +3713,8 @@ local function prepare_lookups(tfmdata)
end
+-- so far
+
local function split(replacement,original)
local result = { }
for i=1,#replacement do
@@ -2734,6 +3733,7 @@ local function prepare_contextchains(tfmdata)
local rawdata = tfmdata.shared.rawdata
local resources = rawdata.resources
local lookuphash = resources.lookuphash
+ local lookuptags = resources.lookuptags
local lookups = rawdata.lookups
if lookups then
for lookupname, lookupdata in next, rawdata.lookups do
@@ -2747,7 +3747,7 @@ local function prepare_contextchains(tfmdata)
report_prepare("unsupported format %a",format)
elseif not validformat[lookuptype] then
-- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
else
local contexts = lookuphash[lookupname]
if not contexts then
@@ -2790,7 +3790,7 @@ local function prepare_contextchains(tfmdata)
-- use sequence[start] instead but it's somewhat ugly.
nt = nt + 1
t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
+ for unic in next, sequence[start] do
local cu = contexts[unic]
if not cu then
contexts[unic] = t
@@ -2803,7 +3803,7 @@ local function prepare_contextchains(tfmdata)
-- no rules
end
else
- report_prepare("missing lookuptype for lookupname %a",lookupname)
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
end
end
end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-syn.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-syn.lua
new file mode 100644
index 00000000000..f03d558bfb4
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-syn.lua
@@ -0,0 +1,106 @@
+if not modules then modules = { } end modules ['luatex-fonts-syn'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+-- Generic font names support.
+--
+-- Watch out, the version number is the same as the one used in
+-- the mtx-fonts.lua function scripts.fonts.names as we use a
+-- simplified font database in the plain solution and by using
+-- a different number we're less dependent on context.
+--
+-- mtxrun --script font --reload --simple
+--
+-- The format of the file is as follows:
+--
+-- return {
+-- ["version"] = 1.001,
+-- ["cache_version"] = 1.001,
+-- ["mappings"] = {
+-- ["somettcfontone"] = { "Some TTC Font One", "SomeFontA.ttc", 1 },
+-- ["somettcfonttwo"] = { "Some TTC Font Two", "SomeFontA.ttc", 2 },
+-- ["somettffont"] = { "Some TTF Font", "SomeFontB.ttf" },
+-- ["someotffont"] = { "Some OTF Font", "SomeFontC.otf" },
+-- },
+-- }
+
+local fonts = fonts
+fonts.names = fonts.names or { }
+
+fonts.names.version = 1.001 -- not the same as in context but matches mtx-fonts --simple
+fonts.names.basename = "luatex-fonts-names"
+fonts.names.new_to_old = { }
+fonts.names.old_to_new = { }
+fonts.names.cache = containers.define("fonts","data",fonts.names.version,true)
+
+local data, loaded = nil, false
+
+local fileformats = { "lua", "tex", "other text files" }
+
+function fonts.names.reportmissingbase()
+ texio.write("<missing font database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingbase = nil
+end
+
+function fonts.names.reportmissingname()
+ texio.write("<unknown font in database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingname = nil
+end
+
+function fonts.names.resolve(name,sub)
+ if not loaded then
+ local basename = fonts.names.basename
+ if basename and basename ~= "" then
+ data = containers.read(fonts.names.cache,basename)
+ if not data then
+ basename = file.addsuffix(basename,"lua")
+ for i=1,#fileformats do
+ local format = fileformats[i]
+ local foundname = resolvers.findfile(basename,format) or ""
+ if foundname ~= "" then
+ data = dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ end
+ loaded = true
+ end
+ if type(data) == "table" and data.version == fonts.names.version then
+ local condensed = string.gsub(string.lower(name),"[^%a%d]","")
+ local found = data.mappings and data.mappings[condensed]
+ if found then
+ local fontname, filename, subfont = found[1], found[2], found[3]
+ if subfont then
+ return filename, fontname
+ else
+ return filename, false
+ end
+ elseif fonts.names.reportmissingname then
+ fonts.names.reportmissingname()
+ return name, false -- fallback to filename
+ end
+ elseif fonts.names.reportmissingbase then
+ fonts.names.reportmissingbase()
+ end
+end
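+
+-- A minimal usage sketch (the names below are hypothetical; lookup keys are the
+-- lowercased names with non-alphanumeric characters stripped, as condensed above):
+--
+-- given data.mappings = { ["somettffont"] = { "Some TTF Font", "SomeFontB.ttf" } }
+--
+-- fonts.names.resolve("Some TTF Font") -- "SomeFontB.ttf", false
+-- fonts.names.resolve("Unknown Font") -- "Unknown Font", false (plus a one-time report)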
+
+fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
+
+function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv
+ return ""
+end
+
+function fonts.names.ignoredfile(filename) -- only supported in mkiv
+ return false -- will be overloaded
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-tfm.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-tfm.lua
index b9bb1bd0f28..b9bb1bd0f28 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-tfm.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-tfm.lua
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.lua
new file mode 100644
index 00000000000..f18ba35db16
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.lua
@@ -0,0 +1,270 @@
+if not modules then modules = { } end modules ['luatex-fonts'] = {
+ version = 1.001,
+ comment = "companion to luatex-fonts.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- The following code isolates the generic context code from already defined or to be defined
+-- namespaces. This is the reference loader for plain tex. This generic code is also used in
+-- luaotfload which is a low level lualatex opentype font loader but somehow has gotten a bit
+-- too generic a name / prefix, originally set up and maintained by Khaled Hosny. Currently that
+-- set of derived files is maintained by a larger team led by Philipp Gesang so when there are
+-- issues with this code in latex, you can best contact him. It might make sense then to first
+-- check if context has the same issue. We do our best to keep the interface as clean as possible.
+--
+-- The code base is rather stable now, especially if you stay away from the non generic code. All
+-- relevant data is organized in tables within the main table of a font instance. There are a few
+-- places where in context other code is plugged in, but this does not affect the core code. Users
+-- can (given that their macro package provides this option) access the font data (characters,
+-- descriptions, properties, parameters, etc) of this main table. The documentation is part of
+-- context. There is also a manual for the helper libraries (maintained as part of the cld manuals).
+--
+-- Future versions will probably have some more specific context code removed, like tracing and
+-- obscure hooks, so that we have a more efficient version (and fewer files too). So, don't depend
+-- too much on low level code that is meant for context as it can change without notice. We might
+-- also add more helper code here, but that depends on to what extent metatex (a sidetrack of context)
+-- evolves into a low level layer (depends on time, as usual).
+
+-- The code here is the same as in context version 2015.09.11 but the rendering in context can be
+-- different from generic. This can be a side effect of additional callbacks, additional features
+-- and interference between the mechanisms of different macro packages. We use the rendering in context
+-- and luatex-plain as reference for issues.
+
+utf = utf or unicode.utf8
+
+-- We have some (global) hooks (for latex):
+
+if not non_generic_context then
+ non_generic_context = { }
+end
+
+if not non_generic_context.luatex_fonts then
+ non_generic_context.luatex_fonts = {
+ -- load_before = nil,
+ -- load_after = nil,
+ -- skip_loading = nil,
+ }
+end
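+
+-- An illustrative sketch (with hypothetical file names) of how a macro package
+-- could set these hooks before this file is loaded:
+--
+-- non_generic_context = {
+--     luatex_fonts = {
+--         load_before  = "my-font-patches.lua", -- loaded via loadmodule (kpse lookup)
+--         load_after   = "my-font-extras.lua",
+--         skip_loading = nil, -- set to true to skip loading the font libraries below
+--     }
+-- }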
+
+if not generic_context then
+ generic_context = { }
+end
+
+if not generic_context.push_namespaces then
+
+ function generic_context.push_namespaces()
+ texio.write(" <push namespace>")
+ local normalglobal = { }
+ for k, v in next, _G do
+ normalglobal[k] = v
+ end
+ return normalglobal
+ end
+
+ function generic_context.pop_namespaces(normalglobal,isolate)
+ if normalglobal then
+ texio.write(" <pop namespace>")
+ for k, v in next, _G do
+ if not normalglobal[k] then
+ generic_context[k] = v
+ if isolate then
+ _G[k] = nil
+ end
+ end
+ end
+ for k, v in next, normalglobal do
+ _G[k] = v
+ end
+ -- just to be sure:
+ setmetatable(generic_context,_G)
+ else
+ texio.write(" <fatal error: invalid pop of generic_context>")
+ os.exit()
+ end
+ end
+
+end
+
+local whatever = generic_context.push_namespaces()
+
+-- We keep track of load time by storing the current time. That way we cannot be accused
+-- of slowing down loading too much. Anyhow, there is no reason for this library to perform
+-- more slowly in any other package than it does in context.
+--
+-- Please don't update to this version without proper testing. It might be that this version
+-- lags behind stock context and the only formal release takes place around tex live code
+-- freeze.
+
+local starttime = os.gettimeofday()
+
+-- As we don't use the context file searching, we need to initialize the kpse library. As the
+-- progname can be anything we will temporarily switch to the context namespace if needed. Just
+-- adding the context paths to the path specification is somewhat faster.
+--
+-- Now, with lua 5.2 being used we might create a special ENV for this.
+
+-- kpse.set_program_name("luatex")
+
+local ctxkpse = nil
+local verbose = true
+
+local function loadmodule(name,continue)
+ local foundname = kpse.find_file(name,"tex") or ""
+ if foundname == "" then
+ if not ctxkpse then
+ ctxkpse = kpse.new("luatex","context")
+ end
+ foundname = ctxkpse:find_file(name,"tex") or ""
+ end
+ if foundname == "" then
+ if not continue then
+ texio.write_nl(string.format(" <luatex-fonts: unable to locate %s>",name))
+ os.exit()
+ end
+ else
+ if verbose then
+ texio.write(string.format(" <%s>",foundname)) -- no file.basename yet
+ end
+ dofile(foundname)
+ end
+end
+
+if non_generic_context.luatex_fonts.load_before then
+ loadmodule(non_generic_context.luatex_fonts.load_before,true)
+end
+
+if non_generic_context.luatex_fonts.skip_loading ~= true then
+
+ loadmodule('luatex-fonts-merged.lua',true)
+
+ if fonts then
+
+ if not fonts._merge_loaded_message_done_ then
+ texio.write_nl("log", "!")
+ texio.write_nl("log", "! I am using the merged version of 'luatex-fonts.lua' here. If")
+ texio.write_nl("log", "! you run into problems or experience unexpected behaviour, and")
+ texio.write_nl("log", "! if you have ConTeXt installed you can try to delete the file")
+ texio.write_nl("log", "! 'luatex-fonts-merged.lua' as I might then use the possibly")
+ texio.write_nl("log", "! updated libraries. The merged version is not supported as it")
+ texio.write_nl("log", "! is a frozen instance. Problems can be reported to the ConTeXt")
+ texio.write_nl("log", "! mailing list.")
+ texio.write_nl("log", "!")
+ end
+
+ fonts._merge_loaded_message_done_ = true
+
+ else
+
+ -- The following helpers are a bit overkill but I don't want to mess up context code for the
+ -- sake of general generality. Around version 1.0 there will be an official api defined.
+ --
+ -- So, I will strip these libraries and see what is really needed so that we don't have this
+ -- overhead in the generic modules. The next section is only there for the packager, so stick
+ -- to using luatex-fonts with luatex-fonts-merged.lua and forget about the rest. The following
+ -- list might change without prior notice (for instance because we shuffled code around).
+
+ loadmodule("l-lua.lua")
+ loadmodule("l-lpeg.lua")
+ loadmodule("l-function.lua")
+ loadmodule("l-string.lua")
+ loadmodule("l-table.lua")
+ loadmodule("l-io.lua")
+ loadmodule("l-file.lua")
+ loadmodule("l-boolean.lua")
+ loadmodule("l-math.lua")
+ loadmodule("util-str.lua")
+
+ -- The following modules contain code that is either not used at all outside context or will fail
+ -- when enabled due to lack of other modules.
+
+ -- First we load a few helper modules. This is about the minimum needed to let the font modules do
+ -- their work. Don't depend on their functions as we might strip them in future versions of this
+ -- generic variant.
+
+ loadmodule('luatex-basics-gen.lua')
+ loadmodule('data-con.lua')
+
+ -- We do need some basic node support. The code in there is not for general use as it might change.
+
+ loadmodule('luatex-basics-nod.lua')
+
+ -- Now come the font modules that deal with traditional tex fonts as well as open type fonts. We only
+ -- support OpenType fonts here.
+ --
+ -- The font database file (if used at all) must be put someplace visible for kpse and is not shared
+ -- with context. The mtx-fonts script can be used to generate this file (using the --names option).
+
+ -- In 2013/14 I will merge/move some generic files into luatex-fonts-* files (copies) so that
+ -- intermediate updates of context don't interfere. We can then also use the general merger and
+ -- consider stripping debug code.
+
+ loadmodule('font-ini.lua')
+ loadmodule('font-con.lua')
+ loadmodule('luatex-fonts-enc.lua') -- will load font-age on demand
+ loadmodule('font-cid.lua')
+ loadmodule('font-map.lua') -- for loading lum file (will be stripped)
+ loadmodule('luatex-fonts-syn.lua') -- deals with font names (synonyms)
+ -- begin of test
+ loadmodule('font-tfm.lua') -- optional
+ loadmodule('font-afm.lua') -- optional
+ loadmodule('font-afk.lua') -- optional
+ -- end of test
+ loadmodule('luatex-fonts-tfm.lua')
+ loadmodule('font-oti.lua')
+ loadmodule('font-otf.lua')
+ loadmodule('font-otb.lua')
+ loadmodule('luatex-fonts-inj.lua') -- normally the same as font-inj.lua
+ loadmodule('luatex-fonts-ota.lua')
+ loadmodule('luatex-fonts-otn.lua') -- normally the same as font-otn.lua
+ loadmodule('font-otp.lua')
+ loadmodule('luatex-fonts-lua.lua')
+ loadmodule('font-def.lua') -- this code (stripped) might end up in luatex-fonts-def.lua
+ loadmodule('luatex-fonts-def.lua')
+ loadmodule('luatex-fonts-ext.lua') -- some extensions
+
+ -- We need to plug into a callback and the following module implements the handlers. Actual plugging
+ -- in happens later.
+
+ loadmodule('luatex-fonts-cbk.lua')
+
+ end
+
+end
+
+if non_generic_context.luatex_fonts.load_after then
+ loadmodule(non_generic_context.luatex_fonts.load_after,true)
+end
+
+resolvers.loadmodule = loadmodule
+
+-- In order to deal with the fonts we need to initialize some callbacks. One can overload them later on if
+-- needed. First a bit of abstraction.
+
+generic_context.callback_ligaturing = false
+generic_context.callback_kerning = false
+generic_context.callback_pre_linebreak_filter = nodes.simple_font_handler
+generic_context.callback_hpack_filter = nodes.simple_font_handler
+generic_context.callback_define_font = fonts.definers.read
+
+-- The next ones can be done at a different moment if needed. You can create a generic_context namespace
+-- and set no_callbacks_yet to true, load this module, and enable the callbacks later. So, there is really
+-- *no* need to create an alternative for luatex-fonts.lua and luatex-fonts-merged.lua: just load this one
+-- and overload if needed.
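+
+-- A minimal sketch of such a deferred setup (assuming the generic plain file name):
+--
+-- generic_context = { no_callbacks_yet = true }
+-- dofile(kpse.find_file("luatex-fonts.lua","tex"))
+-- -- ... later, when the macro package is ready:
+-- callback.register('pre_linebreak_filter', generic_context.callback_pre_linebreak_filter)
+-- callback.register('hpack_filter',         generic_context.callback_hpack_filter)
+-- callback.register('define_font',          generic_context.callback_define_font)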
+
+if not generic_context.no_callbacks_yet then
+
+ callback.register('ligaturing', generic_context.callback_ligaturing)
+ callback.register('kerning', generic_context.callback_kerning)
+ callback.register('pre_linebreak_filter', generic_context.callback_pre_linebreak_filter)
+ callback.register('hpack_filter', generic_context.callback_hpack_filter)
+ callback.register('define_font' , generic_context.callback_define_font)
+
+end
+
+-- We're done.
+
+texio.write(string.format(" <luatex-fonts.lua loaded in %0.3f seconds>", os.gettimeofday()-starttime))
+
+generic_context.pop_namespaces(whatever)
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.tex
new file mode 100644
index 00000000000..7b457e9b4fc
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.tex
@@ -0,0 +1,140 @@
+%D \module
+%D [ file=luatex-fonts,
+%D version=2009.12.01,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Generic \OPENTYPE\ Font Handler,
+%D author=Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D \subject{Welcome}
+%D
+%D This file is one of a set of basic functionality enhancements
+%D for \LUATEX\ derived from the \CONTEXT\ \MKIV\ code base. Please
+%D don't pollute the \type {luatex-*} namespace with code not coming
+%D from the \CONTEXT\ development team as we may add more files.
+%D
+%D As this is an experimental setup, it might not always work out as
+%D expected. Around \LUATEX\ version 0.50 we expect the code to be
+%D more or less okay.
+%D
+%D This file implements a basic font system for a bare \LUATEX\
+%D system. By default \LUATEX\ only knows about the classic \TFM\
+%D fonts but it can read other font formats and pass them to \LUA.
+%D With some glue code one can then construct a suitable \TFM\
+%D representation that \LUATEX\ can work with. For more advanced font
+%D support a bit more code is needed that needs to be hooked
+%D into the callback mechanism.
+%D
+%D This file is currently rather simple: it just loads the \LUA\ file
+%D with the same name. An example of a \type {luatex.tex} file that is
+%D just plain \TEX:
+%D
+%D \starttyping
+%D \catcode`\{=1 % left brace is begin-group character
+%D \catcode`\}=2 % right brace is end-group character
+%D
+%D \input plain
+%D
+%D \everyjob\expandafter{\the\everyjob\input luatex-fonts\relax}
+%D
+%D \dump
+%D \stoptyping
+%D
+%D We could load the \LUA\ file in \type {\everyjob} but maybe some
+%D day we need more here.
+%D
+%D When defining a font you can use two prefixes. A \type {file:}
+%D prefix forces a file search, while a \type {name:} prefix will
+%D result in consulting the names database. Such a database can be
+%D generated with:
+%D
+%D \starttyping
+%D mtxrun --usekpse --script fonts --names
+%D \stoptyping
+%D
+%D This will generate a file \type {luatex-fonts-names.lua} that has
+%D to be placed in a location where it can be found by \KPSE. Beware:
+%D the \type {--usekpse} flag is only used outside \CONTEXT\ and
+%D provides very limited functionality, just enough for this task.
+%D
+%D The code loaded here does not come out of thin air, but is mostly
+%D shared with \CONTEXT; however, in that macro package we go beyond
+%D what is provided here. When you use the code packaged here you
+%D need to keep a few things in mind:
+%D
+%D \startitemize
+%D
+%D \item This subsystem will be extended, improved etc. at about the
+%D same pace as \CONTEXT\ \MKIV. However, because \CONTEXT\ provides a
+%D rather high level of integration not all features will be supported
+%D in the same quality. Use \CONTEXT\ if you want more goodies.
+%D
+%D \item There is no official \API\ yet, which means that using
+%D functions implemented here is at your own risk, in the sense that
+%D names and namespaces might change. There will be a minimal \API\
+%D defined once \LUATEX\ version 1.0 is out. Instead of patching the
+%D files it's better to overload functions if needed.
+%D
+%D \item The modules are not stripped too much, which makes it
+%D possible to benefit from improvements in the code that take place
+%D in the perspective of \CONTEXT\ development. They might be split a
+%D bit more in due time so the baseline might become smaller.
+%D
+%D \item The code is maintained and tested by the \CONTEXT\
+%D development team. As such it might be better suited for this macro
+%D package and integration in other systems might demand some
+%D additional wrapping. Problems can be reported to the team but as we
+%D use \CONTEXT\ \MKIV\ as baseline, you'd better check if the problem
+%D is a general \CONTEXT\ problem too.
+%D
+%D \item The more high level support for features that is provided in
+%D \CONTEXT\ is not part of the code loaded here as it makes no sense
+%D elsewhere. Some experimental features are not part of this code
+%D either but some might show up later.
+%D
+%D \item Math font support will be added but only in its basic form
+%D once the Latin Modern and \TEX\ Gyre math fonts are
+%D available.
+%D
+%D \item At this moment the more nifty speed-ups are not enabled
+%D because they work in tandem with the alternative file handling
+%D that \CONTEXT\ uses. Maybe around \LUATEX\ 1.0 we will bring some
+%D speedup into this code too (if it pays off at all).
+%D
+%D \item The code defines a few global tables. If this code is used
+%D in a larger perspective then you can best make sure that no
+%D conflicts occur. The \CONTEXT\ package expects users to work in
+%D their own namespace (\type {userdata}, \type {thirddata}, \type
+%D {moduledata} or \type {document}). The team takes all freedom to
+%D use any table at the global level but will not use tables that are
+%D named after macro packages. Later the \CONTEXT\ package might operate in
+%D a more controlled namespace but it has a low priority.
+%D
+%D \item There is some tracing code present but this is not enabled
+%D and not supported outside \CONTEXT\ either as it integrates quite
+%D tightly into \CONTEXT. In case of problems you can use \CONTEXT\
+%D to track them down.
+%D
+%D \item Patching the code in distributions is dangerous as it might
+%D fix your problem but introduce new ones for \CONTEXT. So, best keep
+%D the original code as it is.
+%D
+%D \item Attributes are (automatically) taken from the range 127-255 so
+%D you'd best not use these yourself.
+%D
+%D \stopitemize
+%D
+%D If this all sounds a bit tricky, keep in mind that it makes no sense
+%D for us to maintain multiple code bases and we happen to use \CONTEXT.
+%D
+%D For more details about how the font subsystem works we refer to
+%D publications in \TEX\ related journals, the \CONTEXT\ documentation,
+%D and the \CONTEXT\ wiki.
+
+\directlua {
+ if not fonts then
+ dofile(kpse.find_file("luatex-fonts.lua","tex"))
+ end
+}
+
+\endinput
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-boolean.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-boolean.lua
new file mode 100644
index 00000000000..8f18d4c003d
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-boolean.lua
@@ -0,0 +1,69 @@
+if not modules then modules = { } end modules ['l-boolean'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, tonumber = type, tonumber
+
+boolean = boolean or { }
+local boolean = boolean
+
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end -- test and return or return
+end
+
+function toboolean(str,tolerant) -- global
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
+ elseif str == "true" then
+ return true
+ elseif str == "false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
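+
+-- A few illustrative calls (the second argument enables the tolerant mode):
+--
+-- toboolean("true") -- true
+-- toboolean("1") -- false (not tolerant)
+-- toboolean("1",true) -- true
+-- toboolean("yes",true) -- true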
+
+string.toboolean = toboolean
+
+function string.booleanstring(str)
+ if str == "0" then
+ return false
+ elseif str == "1" then
+ return true
+ elseif str == "" then
+ return false
+ elseif str == "false" then
+ return false
+ elseif str == "true" then
+ return true
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
+
+function string.is_boolean(str,default,strict)
+ if type(str) == "string" then
+ if str == "true" or str == "yes" or str == "on" or str == "t" or (not strict and str == "1") then
+ return true
+ elseif str == "false" or str == "no" or str == "off" or str == "f" or (not strict and str == "0") then
+ return false
+ end
+ end
+ return default
+end
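+
+-- Some illustrative calls (the default is returned for unrecognized strings and
+-- strict mode ignores "0" and "1"):
+--
+-- string.is_boolean("on") -- true
+-- string.is_boolean("0") -- false
+-- string.is_boolean("0",nil,true) -- nil (the default)
+-- string.is_boolean("maybe","unknown") -- "unknown"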
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-file.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-file.lua
new file mode 100644
index 00000000000..7ed6370f260
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-file.lua
@@ -0,0 +1,698 @@
+if not modules then modules = { } end modules ['l-file'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- needs a cleanup
+
+file = file or { }
+local file = file
+
+if not lfs then
+ lfs = optionalrequire("lfs")
+end
+
+-- -- see later
+--
+-- if not lfs then
+--
+-- lfs = {
+-- getcurrentdir = function()
+-- return "."
+-- end,
+-- attributes = function()
+-- return nil
+-- end,
+-- isfile = function(name)
+-- local f = io.open(name,'rb')
+-- if f then
+-- f:close()
+-- return true
+-- end
+-- end,
+-- isdir = function(name)
+-- print("you need to load lfs")
+-- return false
+-- end
+-- }
+--
+-- elseif not lfs.isfile then
+--
+-- local attributes = lfs.attributes
+--
+-- function lfs.isdir(name)
+-- return attributes(name,"mode") == "directory"
+-- end
+--
+-- function lfs.isfile(name)
+-- return attributes(name,"mode") == "file"
+-- end
+--
+-- -- function lfs.isdir(name)
+-- -- local a = attributes(name)
+-- -- return a and a.mode == "directory"
+-- -- end
+--
+-- -- function lfs.isfile(name)
+-- -- local a = attributes(name)
+-- -- return a and a.mode == "file"
+-- -- end
+--
+-- end
+
+local insert, concat = table.insert, table.concat
+local match, find, gmatch = string.match, string.find, string.gmatch
+local lpegmatch = lpeg.match
+local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
+local checkedsplit = string.checkedsplit
+
+-- local patterns = file.patterns or { }
+-- file.patterns = patterns
+
+local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct
+
+-- better this way:
+
+local tricky = S("/\\") * P(-1)
+local attributes = lfs.attributes
+
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir, "lfs.isdir")
+end
+
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode") == "directory"
+ else
+ return attributes(name.."/.","mode") == "directory"
+ end
+end
+
+function lfs.isfile(name)
+ return attributes(name,"mode") == "file"
+end
+
+local colon = P(":")
+local period = P(".")
+local periods = P("..")
+local fwslash = P("/")
+local bwslash = P("\\")
+local slashes = S("\\/")
+local noperiod = 1-period
+local noslashes = 1-slashes
+local name = noperiod^1
+local suffix = period/"" * (1-period-slashes)^1 * -1
+
+----- pattern = C((noslashes^0 * slashes^1)^1)
+local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way
+
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+
+local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1
+
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+
+-- print(pathpart("file"))
+-- print(pathpart("dir/file"))
+-- print(pathpart("/dir/file"))
+-- print(basename("file"))
+-- print(basename("dir/file"))
+-- print(basename("/dir/file"))
+
+local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0
+
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+
+local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1
+
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+
+local pattern = (noslashes^0 * slashes)^0 * noperiod^1 * ((period * C(noperiod^1))^1) * -1 + Cc("")
+
+local function suffixesonly(name)
+ if name then
+ return lpegmatch(pattern,name)
+ else
+ return ""
+ end
+end
+
+file.pathpart = pathpart
+file.basename = basename
+file.nameonly = nameonly
+file.suffixonly = suffixonly
+file.suffix = suffixonly
+file.suffixesonly = suffixesonly
+file.suffixes = suffixesonly
+
+file.dirname = pathpart -- obsolete
+file.extname = suffixonly -- obsolete
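+
+-- A few more illustrative calls, in the spirit of the ones above:
+--
+-- print(nameonly("dir/file.tar.gz")) -- file.tar
+-- print(suffixonly("dir/file.tar.gz")) -- gz
+-- print(suffixesonly("dir/file.tar.gz")) -- tar gz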
+
+-- actually these are schemes
+
+local drive = C(R("az","AZ")) * colon
+local path = C((noslashes^0 * slashes)^0)
+local suffix = period * C(P(1-period)^0 * P(-1))
+local base = C((1-suffix)^0)
+local rest = C(P(1)^0)
+
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
+
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
+local pattern_d = path * rest
+
+function file.splitname(str,splitdrive)
+ if not str then
+ -- error
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.splitbase(str)
+ if str then
+ return lpegmatch(pattern_d,str) -- returns path, base+suffix (path has / appended, might change at some point)
+ else
+ return "", str -- assume no path
+ end
+end
+
+---- stripslash = C((1 - P("/")^1*P(-1))^0)
+
+function file.nametotable(str,splitdrive)
+ if str then
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ -- if path ~= "" then
+ -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default
+ -- end
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
+ end
+end
+
+-- print(file.splitname("file"))
+-- print(file.splitname("dir/file"))
+-- print(file.splitname("/dir/file"))
+-- print(file.splitname("file"))
+-- print(file.splitname("dir/file"))
+-- print(file.splitname("/dir/file"))
+
+-- inspect(file.nametotable("file.ext"))
+-- inspect(file.nametotable("dir/file.ext"))
+-- inspect(file.nametotable("/dir/file.ext"))
+-- inspect(file.nametotable("file.ext"))
+-- inspect(file.nametotable("dir/file.ext"))
+-- inspect(file.nametotable("/dir/file.ext"))
+
+----- pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1)
+local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1)
+
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+
+-- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
+--
+-- function file.addsuffix(name, suffix)
+-- local p = lpegmatch(pattern,name)
+-- if p then
+-- return name
+-- else
+-- return name .. "." .. suffix
+-- end
+-- end
+
+local suffix = period/"" * (1-period-slashes)^1 * -1
+local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix)
+
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = lpegmatch(pattern,filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = lpegmatch(pattern,filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename) .. "." .. suffix
+ end
+end
+
+-- print("1 " .. file.addsuffix("name","new") .. " -> name.new")
+-- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
+-- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
+-- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
+-- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
+-- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
+-- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
+-- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
+
+local suffix = period * (1-period-slashes)^1 * -1
+local pattern = Cs((1-suffix)^0)
+
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix ~= "" then
+ return lpegmatch(pattern,name) .. "." .. suffix
+ else
+ return name
+ end
+end
+
+--
+
+local reslasher = lpeg.replacer(P("\\"),"/")
+
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+
+-- We should be able to use:
+--
+-- local writable = P(1) * P("w") * Cc(true)
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(pathpart(name,"."))
+-- return a and lpegmatch(writable,a.permissions) or false
+-- end
+--
+-- But after some testing Taco and I came up with the more robust
+-- variant:
+
+function file.is_writable(name)
+ if not name then
+ -- error
+ elseif lfs.isdir(name) then
+ name = name .. "/m_t_x_t_e_s_t.tmp"
+ local f = io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
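+
+-- An illustrative call (the directory test writes and removes a temporary file):
+--
+-- print(file.is_writable(".")) -- true when the current directory is writable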
+
+local readable = P("r") * Cc(true)
+
+function file.is_readable(name)
+ if name then
+ local a = attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+
+file.isreadable = file.is_readable -- deprecated
+file.iswritable = file.is_writable -- deprecated
+
+function file.size(name)
+ if name then
+ local a = attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+
+function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+
+function file.joinpath(tab,separator) -- table
+ return tab and concat(tab,separator or io.pathseparator) -- can have trailing //
+end
+
+local someslash = S("\\/")
+local stripper = Cs(P(fwslash)^0/"" * reslasher)
+local isnetwork = someslash * someslash * (1-someslash)
+ + (1-fwslash-colon)^1 * colon
+local isroot = fwslash^1 * -1
+local hasroot = fwslash^1
+
+local reslasher = lpeg.replacer(S("\\/"),"/")
+local deslasher = lpeg.replacer(S("\\/")^1,"/")
+
+-- If we have a network or prefix then there is a chance that we end up with two
+-- // in the middle ... we could prevent this if we (1) expand prefixes: and (2)
+-- split and rebuild as url. Of course we could assume no network paths (which
+-- makes sense) and assume either mapped drives (windows) or mounts (unix) but
+-- then we still have to deal with urls ... anyhow, multiple // are never a real
+-- problem but just ugly.
+
+-- function file.join(...)
+-- local lst = { ... }
+-- local one = lst[1]
+-- if lpegmatch(isnetwork,one) then
+-- local one = lpegmatch(reslasher,one)
+-- local two = lpegmatch(deslasher,concat(lst,"/",2))
+-- if lpegmatch(hasroot,two) then
+-- return one .. two
+-- else
+-- return one .. "/" .. two
+-- end
+-- elseif lpegmatch(isroot,one) then
+-- local two = lpegmatch(deslasher,concat(lst,"/",2))
+-- if lpegmatch(hasroot,two) then
+-- return two
+-- else
+-- return "/" .. two
+-- end
+-- elseif one == "" then
+-- return lpegmatch(stripper,concat(lst,"/",2))
+-- else
+-- return lpegmatch(deslasher,concat(lst,"/"))
+-- end
+-- end
+
+function file.join(one, two, three, ...)
+ if not two then
+ return one == "" and one or lpegmatch(stripper,one)
+ end
+ if one == "" then
+ return lpegmatch(stripper,three and concat({ two, three, ... },"/") or two)
+ end
+ if lpegmatch(isnetwork,one) then
+ local one = lpegmatch(reslasher,one)
+ local two = lpegmatch(deslasher,three and concat({ two, three, ... },"/") or two)
+ if lpegmatch(hasroot,two) then
+ return one .. two
+ else
+ return one .. "/" .. two
+ end
+ elseif lpegmatch(isroot,one) then
+ local two = lpegmatch(deslasher,three and concat({ two, three, ... },"/") or two)
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/" .. two
+ end
+ else
+ return lpegmatch(deslasher,concat({ one, two, three, ... },"/"))
+ end
+end
+
+-- or we can use this:
+--
+-- function file.join(...)
+-- local n = select("#",...)
+-- local one = select(1,...)
+-- if n == 1 then
+-- return one == "" and one or lpegmatch(stripper,one)
+-- end
+-- if one == "" then
+-- return lpegmatch(stripper,n > 2 and concat({ ... },"/",2) or select(2,...))
+-- end
+-- if lpegmatch(isnetwork,one) then
+-- local one = lpegmatch(reslasher,one)
+-- local two = lpegmatch(deslasher,n > 2 and concat({ ... },"/",2) or select(2,...))
+-- if lpegmatch(hasroot,two) then
+-- return one .. two
+-- else
+-- return one .. "/" .. two
+-- end
+-- elseif lpegmatch(isroot,one) then
+-- local two = lpegmatch(deslasher,n > 2 and concat({ ... },"/",2) or select(2,...))
+-- if lpegmatch(hasroot,two) then
+-- return two
+-- else
+-- return "/" .. two
+-- end
+-- else
+-- return lpegmatch(deslasher,concat({ ... },"/"))
+-- end
+-- end
+
+-- print(file.join("c:/whatever"))
+-- print(file.join("c:/whatever","name"))
+-- print(file.join("//","/y"))
+-- print(file.join("/","/y"))
+-- print(file.join("","/y"))
+-- print(file.join("/x/","/y"))
+-- print(file.join("x/","/y"))
+-- print(file.join("http://","/y"))
+-- print(file.join("http://a","/y"))
+-- print(file.join("http:///a","/y"))
+-- print(file.join("//nas-1","/y"))
+-- print(file.join("//nas-1/a/b/c","/y"))
+-- print(file.join("\\\\nas-1\\a\\b\\c","\\y"))
+
+-- The previous one fails on "a.b/c" so Taco came up with a split based
+-- variant. After some skyping we got it sort of compatible with the old
+-- one. After that the anchoring to currentdir was added in a better way.
+-- Of course there are some optimizations too. Finally we had to deal with
+-- windows drive prefixes and things like sys://. Eventually gsubs and
+-- finds were replaced by lpegs.
+
+local drivespec = R("az","AZ")^1 * colon
+local anchors = fwslash
+ + drivespec
+local untouched = periods
+ + (1-period)^1 * P(-1)
+local mswindrive = Cs(drivespec * (bwslash/"/" + fwslash)^0)
+local mswinuncpath = (bwslash + fwslash) * (bwslash + fwslash) * Cc("//")
+local splitstarter = (mswindrive + mswinuncpath + Cc(false))
+ * Ct(lpeg.splitat(S("/\\")^1))
+local absolute = fwslash
+
+function file.collapsepath(str,anchor) -- anchor: false|nil, true, "."
+ if not str then
+ return
+ end
+ if anchor == true and not lpegmatch(anchors,str) then
+ str = getcurrentdir() .. "/" .. str
+ end
+ if str == "" or str =="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter, oldelements = lpegmatch(splitstarter,str)
+ local newelements = { }
+ local i = #oldelements
+ while i > 0 do
+ local element = oldelements[i]
+ if element == '.' then
+ -- do nothing
+ elseif element == '..' then
+ local n = i - 1
+ while n > 0 do
+ local element = oldelements[n]
+ if element ~= '..' and element ~= '.' then
+ oldelements[n] = '.'
+ break
+ else
+ n = n - 1
+ end
+ end
+ if n < 1 then
+ insert(newelements,1,'..')
+ end
+ elseif element ~= "" then
+ insert(newelements,1,element)
+ end
+ i = i - 1
+ end
+ if #newelements == 0 then
+ return starter or "."
+ elseif starter then
+ return starter .. concat(newelements, '/')
+ elseif lpegmatch(absolute,str) then
+ return "/" .. concat(newelements,'/')
+ else
+ newelements = concat(newelements, '/')
+ if anchor == "." and find(str,"^%./") then
+ return "./" .. newelements
+ else
+ return newelements
+ end
+ end
+end
+
+-- local function test(str,...)
+-- print(string.format("%-20s %-15s %-30s %-20s",str,file.collapsepath(str),file.collapsepath(str,true),file.collapsepath(str,".")))
+-- end
+-- test("a/b.c/d") test("b.c/d") test("b.c/..")
+-- test("/") test("c:/..") test("sys://..")
+-- test("") test("./") test(".") test("..") test("./..") test("../..")
+-- test("a") test("./a") test("/a") test("a/../..")
+-- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
+-- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
+-- test("./a")
+-- test([[\\a.b.c\d\e]])
+
+local validchars = R("az","09","AZ","--","..")
+local pattern_a = lpeg.replacer(1-validchars)
+local pattern_a = Cs((validchars + P(1)/"-")^1)
+local whatever = P("-")^0 / ""
+local pattern_b = Cs(whatever * (1 - whatever * -1)^1)
+
+function file.robustname(str,strict)
+ if str then
+ str = lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking)
+ else
+ return str
+ end
+ end
+end
+
+file.readdata = io.loaddata
+file.savedata = io.savedata
+
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data = io.loaddata(oldname)
+ if data and data ~= "" then
+ file.savedata(newname,data)
+ end
+ end
+end
+
+-- also rewrite previous
+
+local letter = R("az","AZ") + S("_-+")
+local separator = P("://")
+
+local qualified = period^0 * fwslash
+ + letter * colon
+ + letter^1 * separator
+ + letter^1 * fwslash
+local rootbased = fwslash
+ + letter * colon
+
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
+
+-- ./name ../name /name c: :// name/name
+
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename) ~= nil
+end
+
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename) ~= nil
+end
+
+-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
+--
+-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
+-- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
+-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
+-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
+
+-- -- maybe:
+--
+-- if os.type == "windows" then
+-- local currentdir = getcurrentdir
+-- function getcurrentdir()
+-- return lpegmatch(reslasher,currentdir())
+-- end
+-- end
+
+-- for myself:
+
+function file.strip(name,dir)
+ if name then
+ local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
+ return a ~= "" and a or name
+ end
+end
+
+-- local debuglist = {
+-- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname",
+-- "addsuffix", "removesuffix", "replacesuffix", "join",
+-- "strip","collapsepath", "joinpath", "splitpath",
+-- }
+
+-- for i=1,#debuglist do
+-- local name = debuglist[i]
+-- local f = file[name]
+-- file[name] = function(...)
+-- print(name,f(...))
+-- return f(...)
+-- end
+-- end
+
+-- a goodie: a dumb version of mkdirs (not used in context itself, only
+-- in generic usage)
+
+function lfs.mkdirs(path)
+ local full = ""
+ for sub in gmatch(path,"(/*[^\\/]+)") do -- accepts leading c: and /
+ full = full .. sub
+ -- lfs.isdir("/foo") mistakenly returns true on windows so
+ -- we don't test and just call mkdir as that one is not too picky
+ lfs.mkdir(full)
+ end
+end
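+
+-- An illustrative call (the path is hypothetical; every missing intermediate
+-- directory gets created):
+--
+-- lfs.mkdirs("build/cache/fonts")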
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-function.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-function.lua
new file mode 100644
index 00000000000..7ded8ceecd5
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-function.lua
@@ -0,0 +1,11 @@
+if not modules then modules = { } end modules ['l-functions'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+functions = functions or { }
+
+function functions.dummy() end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-io.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-io.lua
new file mode 100644
index 00000000000..a91d44d8776
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-io.lua
@@ -0,0 +1,358 @@
+if not modules then modules = { } end modules ['l-io'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local io = io
+local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
+local concat = table.concat
+local floor = math.floor
+local type = type
+
+if string.find(os.getenv("PATH"),";",1,true) then
+ io.fileseparator, io.pathseparator = "\\", ";"
+else
+ io.fileseparator, io.pathseparator = "/" , ":"
+end
+
+local function readall(f)
+ return f:read("*all")
+end
+
+-- The next one is up to 50% faster on large files and consumes less memory due
+-- to fewer intermediate large allocations. This phenomenon was discussed on the
+-- luatex dev list.
+
+local function readall(f)
+ local size = f:seek("end")
+ if size == 0 then
+ return ""
+ elseif size < 1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done = f:seek("set",0)
+ local step
+ if size < 1024*1024 then
+ step = 1024 * 1024
+ elseif size > 16*1024*1024 then
+ step = 16*1024*1024
+ else
+ step = floor(size/(1024*1024)) * 1024 * 1024 / 8
+ end
+ local data = { }
+ while true do
+ local r = f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1] = r
+ end
+ end
+ end
+end
+
+io.readall = readall
+
+function io.loaddata(filename,textmode) -- return nil if empty
+ local f = io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ -- local data = f:read('*all')
+ local data = readall(f)
+ f:close()
+ if #data > 0 then
+ return data
+ end
+ end
+end
+
+function io.savedata(filename,data,joiner)
+ local f = io.open(filename,"wb")
+ if f then
+ if type(data) == "table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data) == "function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
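+
+-- A small illustrative round trip (the file name is hypothetical):
+--
+-- io.savedata("test.txt",{ "one", "two" },"\n") -- writes "one\ntwo"
+-- print(io.loaddata("test.txt",true)) -- "one\ntwo"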
+
+-- we can also chunk this one if needed: io.lines(filename,chunksize,"*l")
+
+function io.loadlines(filename,n) -- return nil if empty
+ local f = io.open(filename,'r')
+ if not f then
+ -- no file
+ elseif n then
+ local lines = { }
+ for i=1,n do
+ local line = f:read("*lines")
+ if line then
+ lines[#lines+1] = line
+ else
+ break
+ end
+ end
+ f:close()
+ lines = concat(lines,"\n")
+ if #lines > 0 then
+ return lines
+ end
+ else
+ local line = f:read("*line") or ""
+ f:close()
+ if #line > 0 then
+ return line
+ end
+ end
+end
+
+function io.loadchunk(filename,n)
+ local f = io.open(filename,'rb')
+ if f then
+ local data = f:read(n or 1024)
+ f:close()
+ if #data > 0 then
+ return data
+ end
+ end
+end
+
+function io.exists(filename)
+ local f = io.open(filename)
+ if f == nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+
+function io.size(filename)
+ local f = io.open(filename)
+ if f == nil then
+ return 0
+ else
+ local s = f:seek("end")
+ f:close()
+ return s
+ end
+end
+
+function io.noflines(f)
+ if type(f) == "string" then
+ local f = io.open(f)
+ if f then
+ local n = f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n = 0
+ for _ in f:lines() do
+ n = n + 1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+
+local nextchar = {
+ [ 4] = function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2] = function(f)
+ return f:read(1,1)
+ end,
+ [ 1] = function(f)
+ return f:read(1)
+ end,
+ [-2] = function(f)
+ local a, b = f:read(1,1)
+ return b, a
+ end,
+ [-4] = function(f)
+ local a, b, c, d = f:read(1,1,1,1)
+ return d, c, b, a
+ end
+}
+
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1], f
+ end
+end
+
+local nextbyte = {
+ [4] = function(f)
+ local a, b, c, d = f:read(1,1,1,1)
+ if d then
+ return byte(a), byte(b), byte(c), byte(d)
+ end
+ end,
+ [3] = function(f)
+ local a, b, c = f:read(1,1,1)
+ if b then
+ return byte(a), byte(b), byte(c)
+ end
+ end,
+ [2] = function(f)
+ local a, b = f:read(1,1)
+ if b then
+ return byte(a), byte(b)
+ end
+ end,
+ [1] = function (f)
+ local a = f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2] = function (f)
+ local a, b = f:read(1,1)
+ if b then
+ return byte(b), byte(a)
+ end
+ end,
+ [-3] = function(f)
+ local a, b, c = f:read(1,1,1)
+ if b then
+ return byte(c), byte(b), byte(a)
+ end
+ end,
+ [-4] = function(f)
+ local a, b, c, d = f:read(1,1,1,1)
+ if d then
+ return byte(d), byte(c), byte(b), byte(a)
+ end
+ end
+}
+
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1], f
+ else
+ return nil, nil
+ end
+end
+
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer = io.read()
+ answer = gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer == "" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k] == answer then
+ return answer
+ end
+ end
+ local pattern = "^" .. answer
+ for k=1,#options do
+ local v = options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n = m
+ end
+ if n == 1 then
+ return byte(f:read(1))
+ elseif n == 2 then
+ local a, b = byte(f:read(2),1,2)
+ return 256 * a + b
+ elseif n == 3 then
+ local a, b, c = byte(f:read(3),1,3)
+ return 256*256 * a + 256 * b + c
+ elseif n == 4 then
+ local a, b, c, d = byte(f:read(4),1,4)
+ return 256*256*256 * a + 256*256 * b + 256 * c + d
+ elseif n == 8 then
+ local a, b = readnumber(f,4), readnumber(f,4)
+ return 256*256*256*256 * a + b -- 2^32 * a + b
+ elseif n == 12 then
+ local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
+ return 256*256*256*256*256*256*256*256 * a + 256*256*256*256 * b + c -- 2^64 * a + 2^32 * b + c
+ elseif n == -2 then
+ local b, a = byte(f:read(2),1,2)
+ return 256*a + b
+ elseif n == -3 then
+ local c, b, a = byte(f:read(3),1,3)
+ return 256*256 * a + 256 * b + c
+ elseif n == -4 then
+ local d, c, b, a = byte(f:read(4),1,4)
+ return 256*256*256 * a + 256*256 * b + 256*c + d
+ elseif n == -8 then
+ local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256 * a +
+ 256*256*256*256*256*256 * b +
+ 256*256*256*256*256 * c +
+ 256*256*256*256 * d +
+ 256*256*256 * e +
+ 256*256 * f +
+ 256 * g +
+ h
+ else
+ return 0
+ end
+end
+
+io.readnumber = readnumber
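+
+-- Two illustrative calls (assuming a hypothetical handle f positioned at the
+-- bytes 0x01 0x02): a positive count reads big endian, a negative one little endian.
+--
+-- io.readnumber(f,2) -- 0x0102 = 258
+-- io.readnumber(f,-2) -- 0x0201 = 513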
+
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n = m
+ end
+ local str = gsub(f:read(n),"\000","")
+ return str
+end
+
+-- This works quite ok:
+--
+-- function io.piped(command,writer)
+-- local pipe = io.popen(command)
+-- -- for line in pipe:lines() do
+-- -- print(line)
+-- -- end
+-- while true do
+-- local line = pipe:read(1)
+-- if not line then
+-- break
+-- elseif line ~= "\n" then
+-- writer(line)
+-- end
+-- end
+-- return pipe:close() -- ok, status, (error)code
+-- end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lpeg.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lpeg.lua
new file mode 100644
index 00000000000..5be12468bc6
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lpeg.lua
@@ -0,0 +1,1173 @@
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- lpeg 12 vs lpeg 10: slower compilation, similar parsing speed (i need to check
+-- if i can use new features like capture / 2 and .B); at first sight the xml
+-- parser is some 5% slower
+
+-- lpeg.P("abc") is faster than lpeg.P("a") * lpeg.P("b") * lpeg.P("c")
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+-- move utf -> l-unicode
+-- move string -> l-string or keep it here
+
+lpeg = require("lpeg")
+
+-- The latest lpeg doesn't have print any more, and even the new ones are not
+-- available by default (only when debug mode is enabled), which is a pity as
+-- it helps nail down bottlenecks. Performance seems comparable: some 10%
+-- slower pattern compilation, same parsing speed, although,
+--
+-- local p = lpeg.C(lpeg.P(1)^0 * lpeg.P(-1))
+-- local a = string.rep("123",100)
+-- lpeg.match(p,a)
+--
+-- seems slower and is also still suboptimal (i.e. a match that runs from
+-- beginning to end, one of the cases where string matchers win).
+
+if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+-- some code will move to unicode and string
+
+-- local lpmatch = lpeg.match
+-- local lpprint = lpeg.print
+-- local lpp = lpeg.P
+-- local lpr = lpeg.R
+-- local lps = lpeg.S
+-- local lpc = lpeg.C
+-- local lpb = lpeg.B
+-- local lpv = lpeg.V
+-- local lpcf = lpeg.Cf
+-- local lpcb = lpeg.Cb
+-- local lpcg = lpeg.Cg
+-- local lpct = lpeg.Ct
+-- local lpcs = lpeg.Cs
+-- local lpcc = lpeg.Cc
+-- local lpcmt = lpeg.Cmt
+-- local lpcarg = lpeg.Carg
+
+-- function lpeg.match(l,...) print("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
+
+-- function lpeg.P (l) local p = lpp (l) print("LPEG P =") lpprint(l) return p end
+-- function lpeg.R (l) local p = lpr (l) print("LPEG R =") lpprint(l) return p end
+-- function lpeg.S (l) local p = lps (l) print("LPEG S =") lpprint(l) return p end
+-- function lpeg.C (l) local p = lpc (l) print("LPEG C =") lpprint(l) return p end
+-- function lpeg.B (l) local p = lpb (l) print("LPEG B =") lpprint(l) return p end
+-- function lpeg.V (l) local p = lpv (l) print("LPEG V =") lpprint(l) return p end
+-- function lpeg.Cf (l) local p = lpcf (l) print("LPEG Cf =") lpprint(l) return p end
+-- function lpeg.Cb (l) local p = lpcb (l) print("LPEG Cb =") lpprint(l) return p end
+-- function lpeg.Cg (l) local p = lpcg (l) print("LPEG Cg =") lpprint(l) return p end
+-- function lpeg.Ct (l) local p = lpct (l) print("LPEG Ct =") lpprint(l) return p end
+-- function lpeg.Cs (l) local p = lpcs (l) print("LPEG Cs =") lpprint(l) return p end
+-- function lpeg.Cc (l) local p = lpcc (l) print("LPEG Cc =") lpprint(l) return p end
+-- function lpeg.Cmt (l) local p = lpcmt (l) print("LPEG Cmt =") lpprint(l) return p end
+-- function lpeg.Carg (l) local p = lpcarg(l) print("LPEG Carg =") lpprint(l) return p end
+
+local type, next, tostring = type, next, tostring
+local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
+----- mod, div = math.mod, math.div
+local floor = math.floor
+
+local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt
+local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
+
+-- let's start with an inspector:
+
+if setinspector then
+ setinspector("lpeg",function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local sign = S('+-')
+local zero = P('0')
+local digit = R('09')
+local octdigit = R("07")
+local lowercase = R("az")
+local uppercase = R("AZ")
+local underscore = P("_")
+local hexdigit = digit + lowercase + uppercase
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+----- newline = crlf + S("\r\n") -- cr + lf
+local newline = P("\r") * (P("\n") + P(true)) + P("\n")
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+local period = P(".")
+local comma = P(",")
+
+local utfbom_32_be = P('\000\000\254\255') -- 00 00 FE FF
+local utfbom_32_le = P('\255\254\000\000') -- FF FE 00 00
+local utfbom_16_be = P('\254\255') -- FE FF
+local utfbom_16_le = P('\255\254') -- FF FE
+local utfbom_8 = P('\239\187\191') -- EF BB BF
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
+ + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
+ + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8
+local utfstricttype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
+ + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
+ + utfbom_8 * Cc("utf-8")
+local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4)
+ + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2)
+ + utfbom_8 * Cc(3) + Cc(0)
+
+local utf8next = R("\128\191")
+
+patterns.utfbom_32_be = utfbom_32_be
+patterns.utfbom_32_le = utfbom_32_le
+patterns.utfbom_16_be = utfbom_16_be
+patterns.utfbom_16_le = utfbom_16_le
+patterns.utfbom_8 = utfbom_8
+
+patterns.utf_16_be_nl = P("\000\r\000\n") + P("\000\r") + P("\000\n") -- P("\000\r") * (P("\000\n") + P(true)) + P("\000\n")
+patterns.utf_16_le_nl = P("\r\000\n\000") + P("\r\000") + P("\n\000") -- P("\r\000") * (P("\n\000") + P(true)) + P("\n\000")
+
+patterns.utf_32_be_nl = P("\000\000\000\r\000\000\000\n") + P("\000\000\000\r") + P("\000\000\000\n")
+patterns.utf_32_le_nl = P("\r\000\000\000\n\000\000\000") + P("\r\000\000\000") + P("\n\000\000\000")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+patterns.utfstricttype = utfstricttype
+patterns.utfoffset = utfoffset
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+local utf8character = P(1) * R("\128\191")^0 -- unchecked but fast
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.utf8character = utf8character -- this one can be used in most cases so we might use that one
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
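+
+-- a quick illustration (the strings are just examples):
+--
+-- lpegmatch(patterns.utftype,"\239\187\191foo") -- "utf-8" (bom seen)
+-- lpegmatch(patterns.utftype,"foo")             -- "utf-8" (assumed)
+-- lpegmatch(patterns.validutf8,"foo")           -- true
+-- lpegmatch(patterns.validutf8,"\128foo")       -- false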
+
+local eol = S("\n\r")
+local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+local whitespace = eol + spacer
+local nonspacer = 1 - spacer
+local nonwhitespace = 1 - whitespace
+
+patterns.eol = eol
+patterns.spacer = spacer
+patterns.whitespace = whitespace
+patterns.nonspacer = nonspacer
+patterns.nonwhitespace = nonwhitespace
+
+local stripper = spacer ^0 * C((spacer ^0 * nonspacer ^1)^0) -- from example by roberto
+local fullstripper = whitespace^0 * C((whitespace^0 * nonwhitespace^1)^0)
+
+----- collapser = Cs(spacer^0/"" * ((spacer^1 * endofstring / "") + (spacer^1/" ") + P(1))^0)
+local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
+
+local b_collapser = Cs( whitespace^0 /"" * (nonwhitespace^1 + whitespace^1/" ")^0)
+local e_collapser = Cs((whitespace^1 * P(-1)/"" + nonwhitespace^1 + whitespace^1/" ")^0)
+local m_collapser = Cs( (nonwhitespace^1 + whitespace^1/" ")^0)
+
+local b_stripper = Cs( spacer^0 /"" * (nonspacer^1 + spacer^1/" ")^0)
+local e_stripper = Cs((spacer^1 * P(-1)/"" + nonspacer^1 + spacer^1/" ")^0)
+local m_stripper = Cs( (nonspacer^1 + spacer^1/" ")^0)
+
+patterns.stripper = stripper
+patterns.fullstripper = fullstripper
+patterns.collapser = collapser
+
+patterns.b_collapser = b_collapser
+patterns.m_collapser = m_collapser
+patterns.e_collapser = e_collapser
+
+patterns.b_stripper = b_stripper
+patterns.m_stripper = m_stripper
+patterns.e_stripper = e_stripper
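+
+-- a quick illustration of the difference:
+--
+-- lpegmatch(patterns.stripper,"  a  b  ")   -- "a  b"
+-- lpegmatch(patterns.collapser,"  a  b  ")  -- "a b"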
+
+patterns.lowercase = lowercase
+patterns.uppercase = uppercase
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.equal = P("=")
+patterns.comma = comma
+patterns.commaspacer = comma * spacer^0
+patterns.period = period
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = underscore
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.singlequoted = squote * patterns.nosquote * squote
+patterns.doublequoted = dquote * patterns.nodquote * dquote
+patterns.quoted = patterns.doublequoted + patterns.singlequoted
+
+patterns.digit = digit
+patterns.octdigit = octdigit
+patterns.hexdigit = hexdigit
+patterns.sign = sign
+patterns.cardinal = digit^1
+patterns.integer = sign^-1 * digit^1
+patterns.unsigned = digit^0 * period * digit^1
+patterns.float = sign^-1 * patterns.unsigned
+patterns.cunsigned = digit^0 * comma * digit^1
+patterns.cpunsigned = digit^0 * (period + comma) * digit^1
+patterns.cfloat = sign^-1 * patterns.cunsigned
+patterns.cpfloat = sign^-1 * patterns.cpunsigned
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.cpnumber = patterns.cpfloat + patterns.integer
+patterns.oct = zero * octdigit^1
+patterns.octal = patterns.oct
+patterns.HEX = zero * P("X") * (digit+uppercase)^1
+patterns.hex = zero * P("x") * (digit+lowercase)^1
+patterns.hexadecimal = zero * S("xX") * hexdigit^1
+
+patterns.hexafloat = sign^-1
+ * zero * S("xX")
+ * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1)
+ * (S("pP") * sign^-1 * hexdigit^1)^-1
+patterns.decafloat = sign^-1
+ * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1)
+ * S("eE") * sign^-1 * digit^1
+
+patterns.propername = (uppercase + lowercase + underscore) * (uppercase + lowercase + underscore + digit)^0 * endofstring
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+patterns.longtostring = Cs(whitespace^0/"" * ((patterns.quoted + nonwhitespace^1 + whitespace^1/"" * (P(-1) + Cc(" ")))^0))
+
+local function anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) }
+end
+
+lpeg.anywhere = anywhere
+
+function lpeg.instringchecker(p)
+ p = anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
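+
+-- for instance (pattern and strings are just examples):
+--
+-- local containsfoo = lpeg.instringchecker(lpeg.P("foo"))
+-- print(containsfoo("barfoobar")) -- true
+-- print(containsfoo("barbar"))    -- false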
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too
+-- well, but it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+
+-- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more
+-- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more
+-- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps
+-- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+end
+
+function string.split(str,separator)
+ if separator then
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
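+
+-- for instance:
+--
+-- inspect(string.splitlines("one\ntwo\r\nthree")) -- { "one", "two", "three" }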
+
+-- lpeg.splitters = cache -- no longer public
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+end
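+
+-- the difference with the regular split is that empty strings between
+-- separators are skipped, for instance:
+--
+-- inspect(string.split       ("a,,b",",")) -- { "a", "", "b" }
+-- inspect(string.checkedsplit("a,,b",",")) -- { "a", "b" }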
+
+-- from roberto's site:
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
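+
+-- a worked example: "é" is the byte pair 0xC3 0xA9, i.e. 195 and 169, so f2
+-- gives 195 * 64 + 169 - 12416 = 233, the code point U+00E9
+--
+-- print(lpegmatch(patterns.utf8byte,"é")) -- 233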
+
+--~ local str = " a b c d "
+
+--~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]")
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(anything^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * endofstring)^0)
+end
+
+-- Just for fun I looked at the generated bytecode and
+-- p = (p and p + pp) or pp gets one more instruction (testset).
+
+-- todo: cache when string
+
+function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys
+ local pattern
+ local u = isutf and utf8char or 1
+ if type(one) == "table" then
+ local no = #one
+ local p = P(false)
+ if no == 0 then
+ for k, v in next, one do
+ p = p + P(k) / v
+ end
+ pattern = Cs((p + u)^0)
+ elseif no == 1 then
+ local o = one[1]
+ one, two = P(o[1]), o[2]
+ -- pattern = Cs(((1-one)^1 + one/two)^0)
+ pattern = Cs((one/two + u)^0)
+ else
+ for i=1,no do
+ local o = one[i]
+ p = p + P(o[1]) / o[2]
+ end
+ pattern = Cs((p + u)^0)
+ end
+ else
+ pattern = Cs((P(one)/(two or "") + u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+
+-- local pattern1 = P(1-P(pattern))^0 * P(pattern) : test for not nil
+-- local pattern2 = (P(pattern) * Cc(true) + P(1))^0 : test for true (could be faster, but not much)
+
+function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'patternless finds'
+ local pattern
+ if type(lst) == "table" then
+ pattern = P(false)
+ if #lst == 0 then
+ for k, v in next, lst do
+ pattern = pattern + P(k) -- ignore key, so we can use a replacer table
+ end
+ else
+ for i=1,#lst do
+ pattern = pattern + P(lst[i])
+ end
+ end
+ else
+ pattern = P(lst)
+ end
+ if isutf then
+ pattern = ((utf8char or 1)-pattern)^0 * pattern
+ else
+ pattern = (1-pattern)^0 * pattern
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
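+
+-- for instance (the list is made up); the position after the match is returned:
+--
+-- local p = lpeg.finder { "foo", "bar" }
+-- print(lpegmatch(p,"xxbarxx")) -- 6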
+
+-- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
+-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
+-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ local pattern = P(separator)
+ splitter = C((1 - pattern)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ local pattern = P(separator)
+ splitter = (1 - pattern)^0 * pattern * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+local splitters_s, splitters_p = { }, { }
+
+function lpeg.beforesuffix(separator) -- nil if nothing but empty is ok
+ local splitter = splitters_s[separator]
+ if not splitter then
+ local pattern = P(separator)
+ splitter = C((1 - pattern)^0) * pattern * endofstring
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.afterprefix(separator) -- nil if nothing but empty is ok
+ local splitter = splitters_p[separator]
+ if not splitter then
+ local pattern = P(separator)
+ splitter = pattern * C(anything^0)
+ splitters_p[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
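+
+-- for instance:
+--
+-- print(lpegmatch(lpeg.balancer("(",")"),"(a(b)c)d")) -- 8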
+
+-- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de"))
+-- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty
+-- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc"))
+-- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de"))
+-- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty
+-- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc"))
+-- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc"))
+-- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc"))
+
+-- -- slower:
+--
+-- function lpeg.counter(pattern)
+-- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
+-- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
+-- end
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
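+
+-- for instance:
+--
+-- local n = lpeg.counter("ab")
+-- print(n("xabxxabx")) -- 2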
+
+-- utf extensions
+
+utf = utf or (unicode and unicode.utf8) or { }
+
+local utfcharacters = utf and utf.characters or string.utfcharacters
+local utfgmatch = utf and utf.gmatch
+local utfchar = utf and utf.char
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p = P(false)
+ for uc in utfcharacters(str) do
+ p = p + P(uc)
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p = P(false)
+ for uc in utfgmatch(str,".") do
+ p = p + P(uc)
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p = P(false)
+ local f = function(uc)
+ p = p + P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterion
+ local p = P(false)
+ for i=first,last do
+ p = p + P(utfchar(i))
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ -- tricky, these nested captures
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+-- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω"))
+
+-- lpeg.print(lpeg.R("ab","cd","gh"))
+-- lpeg.print(lpeg.P("a","b","c"))
+-- lpeg.print(lpeg.S("a","b","c"))
+
+-- print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à")))
+-- print(lpeg.count("äáàa",lpeg.UP("áà")))
+-- print(lpeg.count("äáàa",lpeg.US("àá")))
+-- print(lpeg.count("äáàa",lpeg.UR("aá")))
+-- print(lpeg.count("äáàa",lpeg.UR("àá")))
+-- print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF)))
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- table.sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort = table.sort
+
+local function copyindexed(old)
+ local new = { }
+ for i=1,#old do
+ new[i] = old[i]
+ end
+ return new
+end
+
+local function sortedkeys(tab)
+ local keys, s = { }, 0
+ for key,_ in next, tab do
+ s = s + 1
+ keys[s] = key
+ end
+ sort(keys)
+ return keys
+end
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local p_false = P(false)
+local p_true = P(true)
+
+local function make(t)
+ local function making(t)
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
+ else
+ p = p + P(k) * making(v)
+ end
+ end
+ end
+ if t[""] then
+ p = p + p_true
+ end
+ return p
+ end
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
+ else
+ p = p + P(k) * making(v)
+ end
+ end
+ end
+ return p
+end
+
+local function collapse(t,x)
+ if type(t) ~= "table" then
+ return t, x
+ else
+ local n = next(t)
+ if n == nil then
+ return t, x
+ elseif next(t,n) == nil then
+ -- one entry
+ local k = n
+ local v = t[k]
+ if type(v) == "table" then
+ return collapse(v,x..k)
+ else
+ return v, x .. k
+ end
+ else
+ local tt = { }
+ for k, v in next, t do
+ local vv, kk = collapse(v,k)
+ tt[kk] = vv
+ end
+ return tt, x
+ end
+ end
+end
+
+function lpeg.utfchartabletopattern(list) -- goes to util-lpg
+ local tree = { }
+ local n = #list
+ if n == 0 then
+ for s in next, list do
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
+ end
+ end
+ else
+ for i=1,n do
+ local s = list[i]
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
+ end
+ end
+ end
+-- collapse(tree,"") -- needs testing, maybe optional, slightly faster because P("x")*P("X") seems slower than P("xX") (why)
+-- inspect(tree)
+ return make(tree)
+end
+
+-- local t = { "start", "stoep", "staart", "paard" }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/string.upper + 1)^1)
+
+-- local t = { "a", "abc", "ac", "abe", "abxyz", "xy", "bef","aa" }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/string.upper + 1)^1)
+
+-- inspect(lpegmatch(p,"a"))
+-- inspect(lpegmatch(p,"aa"))
+-- inspect(lpegmatch(p,"aaaa"))
+-- inspect(lpegmatch(p,"ac"))
+-- inspect(lpegmatch(p,"bc"))
+-- inspect(lpegmatch(p,"zzbczz"))
+-- inspect(lpegmatch(p,"zzabezz"))
+-- inspect(lpegmatch(p,"ab"))
+-- inspect(lpegmatch(p,"abc"))
+-- inspect(lpegmatch(p,"abe"))
+-- inspect(lpegmatch(p,"xa"))
+-- inspect(lpegmatch(p,"bx"))
+-- inspect(lpegmatch(p,"bax"))
+-- inspect(lpegmatch(p,"abxyz"))
+-- inspect(lpegmatch(p,"foobarbefcrap"))
+
+-- local t = { ["^"] = 1, ["^^"] = 2, ["^^^"] = 3, ["^^^^"] = 4 }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/t + 1)^1)
+-- inspect(lpegmatch(p," ^ ^^ ^^^ ^^^^ ^^^^^ ^^^^^^ ^^^^^^^ "))
+
+-- local t = { ["^^"] = 2, ["^^^"] = 3, ["^^^^"] = 4 }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/t + 1)^1)
+-- inspect(lpegmatch(p," ^ ^^ ^^^ ^^^^ ^^^^^ ^^^^^^ ^^^^^^^ "))
+
+-- lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- }
+
+-- a few handy ones:
+--
+-- faster than find(str,"[\n\r]") when there is a match and # > 7, and always
+-- faster when # > 3
+
+patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol
+
+-- The next pattern^n variant is based on an approach suggested
+-- by Roberto: constructing a big repetition in chunks.
+--
+-- Being sparse is not needed and only complicates matters, and the number of
+-- redundant entries is not that large.
+
+local function nextstep(n,step,result)
+ local m = n % step -- mod(n,step)
+ local d = floor(n/step) -- div(n,step)
+ if d > 0 then
+ local v = V(tostring(step))
+ local s = result.start
+ for i=1,d do
+ if s then
+ s = v * s
+ else
+ s = v
+ end
+ end
+ result.start = s
+ end
+ if step > 1 and result.start then
+ local v = V(tostring(step/2))
+ result[tostring(step)] = v * v
+ end
+ if step > 0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start", ["1"] = pattern }))
+end
+
+-- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1)
+-- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56"
+-- inspect(p)
+-- print(lpeg.match(p,s))
+
+-- moved here (before util-str)
+
+----- digit = R("09")
+----- period = P(".")
+----- zero = P("0")
+local trailingzeros = zero^0 * -digit -- suggested by Roberto R
+local case_1 = period * trailingzeros / ""
+local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
+local number = digit^1 * (case_1 + case_2)
+local stripper = Cs((number + 1)^0)
+
+lpeg.patterns.stripzeros = stripper
+
+-- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
+-- collectgarbage("collect")
+-- str = string.rep(sample,10000)
+-- local ts = os.clock()
+-- lpegmatch(stripper,str)
+-- print(#str, os.clock()-ts, lpegmatch(stripper,sample))
+
+-- for practical reasons we keep this here:
+
+local byte_to_HEX = { }
+local byte_to_hex = { }
+local byte_to_dec = { } -- for md5
+local hex_to_byte = { }
+
+for i=0,255 do
+ local H = format("%02X",i)
+ local h = format("%02x",i)
+ local d = format("%03i",i)
+ local c = char(i)
+ byte_to_HEX[c] = H
+ byte_to_hex[c] = h
+ byte_to_dec[c] = d
+ hex_to_byte[h] = c
+ hex_to_byte[H] = c
+end
+
+local hextobyte = P(2)/hex_to_byte
+local bytetoHEX = P(1)/byte_to_HEX
+local bytetohex = P(1)/byte_to_hex
+local bytetodec = P(1)/byte_to_dec
+local hextobytes = Cs(hextobyte^0)
+local bytestoHEX = Cs(bytetoHEX^0)
+local bytestohex = Cs(bytetohex^0)
+local bytestodec = Cs(bytetodec^0)
+
+patterns.hextobyte = hextobyte
+patterns.bytetoHEX = bytetoHEX
+patterns.bytetohex = bytetohex
+patterns.bytetodec = bytetodec
+patterns.hextobytes = hextobytes
+patterns.bytestoHEX = bytestoHEX
+patterns.bytestohex = bytestohex
+patterns.bytestodec = bytestodec
+
+function string.toHEX(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+
+function string.tohex(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+
+function string.todec(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+
+function string.tobytes(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
+
+-- local h = "ADFE0345"
+-- local b = lpegmatch(patterns.hextobytes,h)
+-- print(h,b,string.tohex(b),string.toHEX(b))
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lua.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lua.lua
new file mode 100644
index 00000000000..cb6182907bf
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-lua.lua
@@ -0,0 +1,192 @@
+if not modules then modules = { } end modules ['l-lua'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- potential issues with 5.3:
+
+-- i'm not sure yet if the int/float change is good for luatex
+
+-- math.min
+-- math.max
+-- tostring
+-- tonumber
+-- utf.*
+-- bit32
+
+-- compatibility hacks and helpers
+
+local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+
+_MAJORVERSION = tonumber(major) or 5
+_MINORVERSION = tonumber(minor) or 1
+_LUAVERSION = _MAJORVERSION + _MINORVERSION/10
+
+-- lpeg
+
+if not lpeg then
+ lpeg = require("lpeg")
+end
+
+-- basics:
+
+if loadstring then
+
+ local loadnormal = load
+
+ function load(first,...)
+ if type(first) == "string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+
+else
+
+ loadstring = load
+
+end
+
+-- table:
+
+-- At some point it was announced that ipairs would be dropped, which makes
+-- sense. As we already used the for loop and # in most places the impact on
+-- ConTeXt was not that large; the remaining ipairs have already been replaced.
+-- Hm, actually ipairs was retained, but we no longer use it anyway (nor
+-- pairs).
+--
+-- Just in case, we provide the fallbacks as discussed in Programming
+-- in Lua (http://www.lua.org/pil/7.3.html):
+
+if not ipairs then
+
+ -- for k, v in ipairs(t) do ... end
+ -- for k=1,#t do local v = t[k] ... end
+
+ local function iterate(a,i)
+ i = i + 1
+ local v = a[i]
+ if v ~= nil then
+ return i, v --, nil
+ end
+ end
+
+ function ipairs(a)
+ return iterate, a, 0
+ end
+
+end
+
+if not pairs then
+
+ -- for k, v in pairs(t) do ... end
+ -- for k, v in next, t do ... end
+
+ function pairs(t)
+ return next, t -- , nil
+ end
+
+end
+
+-- The unpack function has been moved to the table table, and for compatibility
+-- reasons we provide both now.
+
+if not table.unpack then
+
+ table.unpack = _G.unpack
+
+elseif not unpack then
+
+ _G.unpack = table.unpack
+
+end
+
+-- package:
+
+-- if not package.searchers then
+--
+-- package.searchers = package.loaders -- 5.2
+--
+-- elseif not package.loaders then
+--
+-- package.loaders = package.searchers
+--
+-- end
+
+if not package.loaders then -- brr, searchers is a special "loadlib function" userdata type
+
+ package.loaders = package.searchers
+
+end
+
+-- moved from util-deb to here:
+
+local print, select, tostring = print, select, tostring
+
+local inspectors = { }
+
+function setinspector(kind,inspector) -- global function
+ inspectors[kind] = inspector
+end
+
+function inspect(...) -- global function
+ for s=1,select("#",...) do
+ local value = select(s,...)
+ if value == nil then
+ print("nil")
+ else
+ local done = false
+ -- type driven (table)
+ local kind = type(value)
+ local inspector = inspectors[kind]
+ if inspector then
+ done = inspector(value)
+ if done then
+ break
+ end
+ end
+ -- whatever driven (token, node, ...)
+ for kind, inspector in next, inspectors do
+ done = inspector(value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+ end
+end
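+
+-- a small sketch of usage; type specific inspectors (like the lpeg one set in
+-- l-lpeg) kick in when registered, otherwise tostring is used:
+--
+-- inspect("foo",123,nil) -- prints foo, 123 and nil, each on its own line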
+
+--
+
+local dummy = function() end
+
+function optionalrequire(...)
+ local ok, result = xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
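+
+-- for instance:
+--
+-- local lfs = optionalrequire("lfs") -- nil when the module is not found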
+
+-- nice for non ascii scripts (this might move):
+
+if lua then
+ lua.mask = load([[τεχ = 1]]) and "utf" or "ascii"
+end
+
+local flush = io.flush
+
+if flush then
+
+ local execute = os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec = os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn = os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen = io.popen if popen then function io.popen (...) flush() return popen (...) end end
+
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-math.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-math.lua
new file mode 100644
index 00000000000..ec62919b46a
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-math.lua
@@ -0,0 +1,38 @@
+if not modules then modules = { } end modules ['l-math'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
+
+if not math.ceiling then
+ math.ceiling = math.ceil
+end
+
+if not math.round then
+ function math.round(x) return floor(x + 0.5) end
+end
+
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+
+if not math.mod then
+ function math.mod(n,m) return n % m end
+end
+
+local pipi = 2*math.pi/360
+
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+
+if not math.odd then
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
+end
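+
+-- for instance:
+--
+-- print(math.round(2.5)) -- 3
+-- print(math.div(7,2))   -- 3
+-- print(math.mod(7,2))   -- 1
+-- print(math.odd(3))     -- true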
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-string.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-string.lua
new file mode 100644
index 00000000000..e9dc2bbbcff
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-string.lua
@@ -0,0 +1,213 @@
+if not modules then modules = { } end modules ['l-string'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local string = string
+local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs
+
+-- Some functions are already defined in l-lpeg and maybe some from here will
+-- move there (unless we also expose caches).
+
+-- if not string.split then
+--
+-- function string.split(str,pattern)
+-- local t = { }
+-- if #str > 0 then
+-- local n = 1
+-- for s in gmatch(str..pattern,"(.-)"..pattern) do
+-- t[n] = s
+-- n = n + 1
+-- end
+-- end
+-- return t
+-- end
+--
+-- end
+
+-- function string.unquoted(str)
+-- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern
+-- end
+
+local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote
+ + patterns.dquote * C(patterns.nodquote) * patterns.dquote
+
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function string.quoted(str)
+ return format("%q",str) -- always double quote
+end
+
+function string.count(str,pattern) -- variant 3
+ local n = 0
+ for _ in gmatch(str,pattern) do -- not for utf
+ n = n + 1
+ end
+ return n
+end
+
+function string.limit(str,n,sentinel) -- not utf proof
+ if #str > n then
+ sentinel = sentinel or "..."
+ return sub(str,1,(n-#sentinel)) .. sentinel
+ else
+ return str
+ end
+end
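+
+-- for instance:
+--
+-- print(string.count("banana","an"))  -- 2
+-- print(string.limit("abcdefghij",6)) -- abc...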
+
+local stripper = patterns.stripper
+local fullstripper = patterns.fullstripper
+local collapser = patterns.collapser
+local longtostring = patterns.longtostring
+
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
+
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
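+
+-- for instance:
+--
+-- print(string.strip("  x  y  "))          -- "x  y"
+-- print(string.collapsespaces("  x  y  ")) -- "x y"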
+
+-- function string.is_empty(str)
+-- return not find(str,"%S")
+-- end
+
+local pattern = P(" ")^0 * P(-1) -- maybe also newlines
+
+-- patterns.onlyspaces = pattern
+
+function string.is_empty(str)
+ if str == "" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+
+-- if not string.escapedpattern then
+--
+-- local patterns_escapes = {
+-- ["%"] = "%%",
+-- ["."] = "%.",
+-- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+-- ["["] = "%[", ["]"] = "%]",
+-- ["("] = "%(", [")"] = "%)",
+-- -- ["{"] = "%{", ["}"] = "%}"
+-- -- ["^"] = "%^", ["$"] = "%$",
+-- }
+--
+-- local simple_escapes = {
+-- ["-"] = "%-",
+-- ["."] = "%.",
+-- ["?"] = ".",
+-- ["*"] = ".*",
+-- }
+--
+-- function string.escapedpattern(str,simple)
+-- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+-- end
+--
+-- function string.topattern(str,lowercase,strict)
+-- if str == "" then
+-- return ".*"
+-- else
+-- str = gsub(str,".",simple_escapes)
+-- if lowercase then
+-- str = lower(str)
+-- end
+-- if strict then
+-- return "^" .. str .. "$"
+-- else
+-- return str
+-- end
+-- end
+-- end
+--
+-- end
+
+--- needs checking
+
+local anything = patterns.anything
+local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ?
+local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ?
+local matchescapes = Cc(".") * S("*?") -- wildcard and single match
+
+local pattern_a = Cs ( ( allescapes + anything )^0 )
+local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 )
+local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") )
+
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str) ~= "string" then
+ return ".*"
+ elseif strict then
+ str = lpegmatch(pattern_c,str)
+ else
+ str = lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+
+-- print(string.escapedpattern("12+34*.tex",false))
+-- print(string.escapedpattern("12+34*.tex",true))
+-- print(string.topattern ("12+34*.tex",false,false))
+-- print(string.topattern ("12+34*.tex",false,true))
+
+function string.valid(str,default)
+ return (type(str) == "string" and str ~= "" and str) or default or nil
+end
+
+-- handy fallback
+
+string.itself = function(s) return s end
+
+-- also handy (see utf variant)
+
+local pattern_c = Ct( C(1) ^0) -- string and not utf !
+local pattern_b = Ct((C(1)/byte)^0)
+
+function string.totable(str,bytes)
+ return lpegmatch(bytes and pattern_b or pattern_c,str)
+end
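+
+-- for instance:
+--
+-- inspect(string.totable("abc"))      -- { "a", "b", "c" }
+-- inspect(string.totable("abc",true)) -- { 97, 98, 99 }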
+
+-- handy from within tex:
+
+local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
+
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+
+-- obsolete names:
+
+string.quote = string.quoted
+string.unquote = string.unquoted
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-table.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-table.lua
new file mode 100644
index 00000000000..552097e1c10
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-l-table.lua
@@ -0,0 +1,1265 @@
+if not modules then modules = { } end modules ['l-table'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select
+local table, string = table, string
+local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
+local format, lower, dump = string.format, string.lower, string.dump
+local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local floor = math.floor
+
+-- extra functions, some might go (when not used)
+--
+-- we could serialize using %a but that won't work well as in the code we mostly
+-- use floats and as such we get inequality e.g. in version comparisons
+
+local stripper = patterns.stripper
+
+function table.strip(tab)
+ local lst, l = { }, 0
+ for i=1,#tab do
+ local s = lpegmatch(stripper,tab[i]) or ""
+ if s == "" then
+ -- skip this one
+ else
+ l = l + 1
+ lst[l] = s
+ end
+ end
+ return lst
+end
+
+function table.keys(t)
+ if t then
+ local keys, k = { }, 0
+ for key in next, t do
+ k = k + 1
+ keys[k] = key
+ end
+ return keys
+ else
+ return { }
+ end
+end
+
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb and ta == "number" then
+-- return a < b
+-- else
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+-- end
+
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb and (ta == "number" or ta == "string") then
+-- return a < b
+-- else
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+-- end
+
+-- local function sortedkeys(tab)
+-- if tab then
+-- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+-- for key in next, tab do
+-- s = s + 1
+-- srt[s] = key
+-- if category == 3 then
+-- -- no further check
+-- else
+-- local tkey = type(key)
+-- if tkey == "string" then
+-- category = (category == 2 and 3) or 1
+-- elseif tkey == "number" then
+-- category = (category == 1 and 3) or 2
+-- else
+-- category = 3
+-- end
+-- end
+-- end
+-- if category == 0 or category == 3 then
+-- sort(srt,compare)
+-- else
+-- sort(srt)
+-- end
+-- return srt
+-- else
+-- return { }
+-- end
+-- end
+
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb and (ta == "number" or ta == "string") then
+-- return a < b
+-- else
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+-- end
+
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- if ta == "number" or ta == "string" then
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb then
+-- return a < b
+-- end
+-- end
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+
+local function compare(a,b)
+ local ta = type(a) -- needed, else 11 < 2
+ if ta == "number" then
+ local tb = type(b) -- needed, else 11 < 2
+ if ta == tb then
+ return a < b
+ elseif tb == "string" then
+ return tostring(a) < b
+ end
+ elseif ta == "string" then
+ local tb = type(b) -- needed, else 11 < 2
+ if ta == tb then
+ return a < b
+ else
+ return a < tostring(b)
+ end
+ end
+ return tostring(a) < tostring(b) -- not that efficient
+end
+
+local function sortedkeys(tab)
+ if tab then
+ local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+ for key in next, tab do
+ s = s + 1
+ srt[s] = key
+ if category == 3 then
+ -- no further check
+ elseif category == 1 then
+ if type(key) ~= "string" then
+ category = 3
+ end
+ elseif category == 2 then
+ if type(key) ~= "number" then
+ category = 3
+ end
+ else
+ local tkey = type(key)
+ if tkey == "string" then
+ category = 1
+ elseif tkey == "number" then
+ category = 2
+ else
+ category = 3
+ end
+ end
+ end
+ if s < 2 then
+ -- nothing to sort
+ elseif category == 3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return { }
+ end
+end
+
+local function sortedhashonly(tab)
+ if tab then
+ local srt, s = { }, 0
+ for key in next, tab do
+ if type(key) == "string" then
+ s = s + 1
+ srt[s] = key
+ end
+ end
+ if s > 1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return { }
+ end
+end
+
+local function sortedindexonly(tab)
+ if tab then
+ local srt, s = { }, 0
+ for key in next, tab do
+ if type(key) == "number" then
+ s = s + 1
+ srt[s] = key
+ end
+ end
+ if s > 1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return { }
+ end
+end
+
+local function sortedhashkeys(tab,cmp) -- fast one
+ if tab then
+ local srt, s = { }, 0
+ for key in next, tab do
+ if key then
+ s= s + 1
+ srt[s] = key
+ end
+ end
+ if s > 1 then
+ sort(srt,cmp)
+ end
+ return srt
+ else
+ return { }
+ end
+end
+
+function table.allkeys(t)
+ local keys = { }
+ for k, v in next, t do
+ for k in next, v do
+ keys[k] = true
+ end
+ end
+ return sortedkeys(keys)
+end
+
+table.sortedkeys = sortedkeys
+table.sortedhashonly = sortedhashonly
+table.sortedindexonly = sortedindexonly
+table.sortedhashkeys = sortedhashkeys
+
+local function nothing() end
+
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ -- it would be nice if the sort function would accept a third argument (or nicer, an optional first)
+ s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s = sortedkeys(t) -- the robust one
+ end
+ local m = #s
+ if m == 1 then
+ return next, t
+ elseif m > 0 then
+ local n = 0
+ return function()
+ if n < m then
+ n = n + 1
+ local k = s[n]
+ return k, t[k]
+ end
+ end
+ end
+ end
+ return nothing
+end
+
+table.sortedhash = sortedhash
+table.sortedpairs = sortedhash -- obsolete
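+
+-- a usage sketch (the table is just an example); keys come back in sorted
+-- order:
+--
+-- for k, v in table.sortedhash { b = 2, a = 1, c = 3 } do
+--     print(k,v) -- a 1, then b 2, then c 3
+-- end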
+
+function table.append(t,list)
+ local n = #t
+ for i=1,#list do
+ n = n + 1
+ t[n] = list[i]
+ end
+ return t
+end
+
+function table.prepend(t, list)
+ local nl = #list
+ local nt = nl + #t
+ for i=#t,1,-1 do
+ t[nt] = t[i]
+ nt = nt - 1
+ end
+ for i=1,#list do
+ t[i] = list[i]
+ end
+ return t
+end
+
+-- function table.merge(t, ...) -- first one is target
+-- t = t or { }
+-- local lst = { ... }
+-- for i=1,#lst do
+-- for k, v in next, lst[i] do
+-- t[k] = v
+-- end
+-- end
+-- return t
+-- end
+
+function table.merge(t, ...) -- first one is target
+ t = t or { }
+ for i=1,select("#",...) do
+ for k, v in next, (select(i,...)) do
+ t[k] = v
+ end
+ end
+ return t
+end
+
+-- function table.merged(...)
+-- local tmp, lst = { }, { ... }
+-- for i=1,#lst do
+-- for k, v in next, lst[i] do
+-- tmp[k] = v
+-- end
+-- end
+-- return tmp
+-- end
+
+function table.merged(...)
+ local t = { }
+ for i=1,select("#",...) do
+ for k, v in next, (select(i,...)) do
+ t[k] = v
+ end
+ end
+ return t
+end
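+
+-- for instance (later tables win):
+--
+-- inspect(table.merged({ a = 1 },{ b = 2 },{ a = 3 })) -- { a = 3, b = 2 }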
+
+-- function table.imerge(t, ...)
+-- local lst, nt = { ... }, #t
+-- for i=1,#lst do
+-- local nst = lst[i]
+-- for j=1,#nst do
+-- nt = nt + 1
+-- t[nt] = nst[j]
+-- end
+-- end
+-- return t
+-- end
+
+function table.imerge(t, ...)
+ local nt = #t
+ for i=1,select("#",...) do
+ local nst = select(i,...)
+ for j=1,#nst do
+ nt = nt + 1
+ t[nt] = nst[j]
+ end
+ end
+ return t
+end
+
+-- function table.imerged(...)
+-- local tmp, ntmp, lst = { }, 0, {...}
+-- for i=1,#lst do
+-- local nst = lst[i]
+-- for j=1,#nst do
+-- ntmp = ntmp + 1
+-- tmp[ntmp] = nst[j]
+-- end
+-- end
+-- return tmp
+-- end
+
+function table.imerged(...)
+ local tmp, ntmp = { }, 0
+ for i=1,select("#",...) do
+ local nst = select(i,...)
+ for j=1,#nst do
+ ntmp = ntmp + 1
+ tmp[ntmp] = nst[j]
+ end
+ end
+ return tmp
+end
+
+local function fastcopy(old,metatabletoo) -- fast one
+ if old then
+ local new = { }
+ for k, v in next, old do
+ if type(v) == "table" then
+ new[k] = fastcopy(v,metatabletoo) -- was just table.copy
+ else
+ new[k] = v
+ end
+ end
+ if metatabletoo then
+ -- optional second arg
+ local mt = getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return { }
+ end
+end
+
+-- todo : copy without metatable
+
+local function copy(t, tables) -- taken from lua wiki, slightly adapted
+ tables = tables or { }
+ local tcopy = { }
+ if not tables[t] then
+ tables[t] = tcopy
+ end
+ for i,v in next, t do -- brrr, what happens with sparse indexed
+ if type(i) == "table" then
+ if tables[i] then
+ i = tables[i]
+ else
+ i = copy(i, tables)
+ end
+ end
+ if type(v) ~= "table" then
+ tcopy[i] = v
+ elseif tables[v] then
+ tcopy[i] = tables[v]
+ else
+ tcopy[i] = copy(v, tables)
+ end
+ end
+ local mt = getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+
+table.fastcopy = fastcopy
+table.copy = copy
+
+function table.derive(parent) -- for the moment not public
+ local child = { }
+ if parent then
+ setmetatable(child,{ __index = parent })
+ end
+ return child
+end
+
+function table.tohash(t,value)
+ local h = { }
+ if t then
+ if value == nil then value = true end
+ for _, v in next, t do -- no ipairs here
+ h[v] = value
+ end
+ end
+ return h
+end
+
+function table.fromhash(t)
+ local hsh, h = { }, 0
+ for k, v in next, t do -- no ipairs here
+ if v then
+ h = h + 1
+ hsh[h] = k
+ end
+ end
+ return hsh
+end
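+
+-- for instance:
+--
+-- inspect(table.tohash { "a", "b" })              -- { a = true, b = true }
+-- inspect(table.fromhash { a = true, b = true })  -- { "a", "b" } (some order)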
+
+local noquotes, hexify, handle, compact, inline, functions
+
+local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
+ 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
+ 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+ 'NaN', 'goto',
+}
+
+-- local function simple_table(t)
+-- if #t > 0 then
+-- local n = 0
+-- for _,v in next, t do
+-- n = n + 1
+-- end
+-- if n == #t then
+-- local tt, nt = { }, 0
+-- for i=1,#t do
+-- local v = t[i]
+-- local tv = type(v)
+-- if tv == "number" then
+-- nt = nt + 1
+-- if hexify then
+-- tt[nt] = format("0x%X",v)
+-- else
+-- tt[nt] = tostring(v) -- tostring not needed
+-- end
+-- elseif tv == "string" then
+-- nt = nt + 1
+-- tt[nt] = format("%q",v)
+-- elseif tv == "boolean" then
+-- nt = nt + 1
+-- tt[nt] = v and "true" or "false"
+-- else
+-- return nil
+-- end
+-- end
+-- return tt
+-- end
+-- end
+-- return nil
+-- end
+
+local function simple_table(t)
+ local nt = #t
+ if nt > 0 then
+ local n = 0
+ for _,v in next, t do
+ n = n + 1
+ -- if type(v) == "table" then
+ -- return nil
+ -- end
+ end
+ if n == nt then
+ local tt = { }
+ for i=1,nt do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "number" then
+ if hexify then
+ tt[i] = format("0x%X",v)
+ else
+ tt[i] = tostring(v) -- tostring not needed
+ end
+ elseif tv == "string" then
+ tt[i] = format("%q",v)
+ elseif tv == "boolean" then
+ tt[i] = v and "true" or "false"
+ else
+ return nil
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+
+-- Because this is a core function of mkiv I moved some function calls
+-- inline.
+--
+-- twice as fast in a test:
+--
+-- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
+
+-- problem: there is no good number_to_string converter with the best resolution
+
+-- probably using .. is faster than format
+-- maybe split in a few cases (yes/no hexify)
+
+-- todo: %g faster on numbers than %s
+
+-- we can speed this up with repeaters and formatters but we haven't defined them
+-- yet
+
+local propername = patterns.propername -- was find(name,"^%a[%w%_]*$")
+
+local function dummy() end
+
+local function do_serialize(root,name,depth,level,indexed)
+ if level > 0 then
+ depth = depth .. " "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn = type(name)
+ if tn == "number" then
+ if hexify then
+ handle(format("%s[0x%X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn == "string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn == "boolean" then
+ handle(format("%s[%s]={",depth,name and "true" or "false"))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ -- we could check for k (index) being number (cardinal)
+ if root and next(root) ~= nil then
+ local first, last = nil, 0
+ if compact then
+ last = #root
+ for k=1,last do
+ if root[k] == nil then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
+ end
+ end
+ local sk = sortedkeys(root)
+ for i=1,#sk do
+ local k = sk[i]
+ local v = root[k]
+ local tv = type(v)
+ local tk = type(k)
+ if compact and first and tk == "number" and k >= first and k <= last then
+ if tv == "number" then
+ if hexify then
+ handle(format("%s 0x%X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v)) -- %.99g
+ end
+ elseif tv == "string" then
+ handle(format("%s %q,",depth,v))
+ elseif tv == "table" then
+ if next(v) == nil then
+ handle(format("%s {},",depth))
+ elseif inline then -- and #t > 0
+ local st = simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif tv == "boolean" then
+ handle(format("%s %s,",depth,v and "true" or "false"))
+ elseif tv == "function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v))) -- maybe strip
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k == "__p__" then -- parent
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif tv == "number" then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
+ end
+ elseif tk == "boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
+ else
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) -- %.99g
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v)) -- %.99g
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
+ end
+ end
+ elseif tv == "string" then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ elseif tv == "table" then
+ if next(v) == nil then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]={},",depth,k and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st = simple_table(v)
+ if st then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif tv == "boolean" then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tv == "function" then
+ if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v) -- maybe strip
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) -- maybe strip
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level > 0 then
+ handle(format("%s},",depth))
+ end
+end
+
+-- replacing handle by a direct t[#t+1] = ... (plus test) is not much
+-- faster (0.03 on 1.00 for zapfino.tma)
+
+local function serialize(_handle,root,name,specification) -- handle wins
+ local tname = type(name)
+ if type(specification) == "table" then
+ noquotes = specification.noquotes
+ hexify = specification.hexify
+ handle = _handle or specification.handle or print
+ functions = specification.functions
+ compact = specification.compact
+ inline = specification.inline and compact
+ if functions == nil then
+ functions = true
+ end
+ if compact == nil then
+ compact = true
+ end
+ if inline == nil then
+ inline = compact
+ end
+ else
+ noquotes = false
+ hexify = false
+ handle = _handle or print
+ compact = true
+ inline = true
+ functions = true
+ end
+ if tname == "string" then
+ if name == "return" then
+ handle("return {")
+ else
+ handle(name .. "={")
+ end
+ elseif tname == "number" then
+ if hexify then
+ handle(format("[0x%X]={",name))
+ else
+ handle("[" .. name .. "]={")
+ end
+ elseif tname == "boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ -- The dummy access will initialize a table that has a delayed initialization
+ -- using a metatable. (maybe explicitly test for metatable)
+ if getmetatable(root) then -- todo: make this an option, maybe even per subtable
+ local dummy = root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_ = nil
+ end
+ -- Let's forget about empty tables.
+ if next(root) ~= nil then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+
+-- A version with formatters is some 20% faster than using format (because formatters are
+-- much faster), but of course inlining the format with .. is faster still. Anyway, as we
+-- do some pretty printing as well, there is not that much to gain unless we also make a
+-- 'fast' ugly variant, but then we would have to move the formatter to l-string.
+
+-- name:
+--
+-- true : return { }
+-- false : { }
+-- nil : t = { }
+-- string : string = { }
+-- "return" : return { }
+-- number : [number] = { }
+
+function table.serialize(root,name,specification)
+ local t, n = { }, 0
+ local function flush(s)
+ n = n + 1
+ t[n] = s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+
+-- local a = { e = { 1,2,3,4,5,6}, a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc" } } }
+-- local t = os.clock()
+-- for i=1,10000 do
+-- table.serialize(a)
+-- end
+-- print(os.clock()-t,table.serialize(a))
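+
+-- A quick usage sketch (made-up data; defaults apply: compact, inline, functions):
+--
+--   table.serialize({ "a", "b", n = 1 },"return")
+--
+-- returns the string
+--
+--   return {
+--    "a",
+--    "b",
+--    ["n"]=1,
+--   }
+--
+-- and a specification like { noquotes = true } renders the key as n=1 instead of
+-- ["n"]=1, while { hexify = true } prints numbers in 0x.. form.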
+
+table.tohandle = serialize
+
+local maxtab = 2*1024
+
+function table.tofile(filename,root,name,specification)
+ local f = io.open(filename,'w')
+ if f then
+ if maxtab > 1 then
+ local t, n = { }, 0
+ local function flush(s)
+ n = n + 1
+ t[n] = s
+ if n > maxtab then
+ f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
+ t, n = { }, 0 -- we could recycle t if needed
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
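+
+-- A small sketch of intended use (filename made up): table.tofile("cache.lua",
+-- data,"return") writes a "return { ... }" chunk that can be read back with
+-- dofile or loadfile; the maxtab buffer above only batches the writes.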
+
+local function flattened(t,f,depth) -- also handles { nil, 1, nil, 2 }
+ if f == nil then
+ f = { }
+ depth = 0xFFFF
+ elseif tonumber(f) then
+ -- assume that only two arguments are given
+ depth = f
+ f = { }
+ elseif not depth then
+ depth = 0xFFFF
+ end
+ for k, v in next, t do
+ if type(k) ~= "number" then
+ if depth > 0 and type(v) == "table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1] = v
+ end
+ end
+ end
+ for k=1,#t do
+ local v = t[k]
+ if depth > 0 and type(v) == "table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1] = v
+ end
+ end
+ return f
+end
+
+table.flattened = flattened
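+
+-- For illustration (made-up data):
+--
+--   table.flattened({ 1, { 2, 3 }, { 4, { 5 } } })   -- { 1, 2, 3, 4, 5 }
+--   table.flattened({ 1, { 2, 3 }, { 4, { 5 } } },1) -- { 1, 2, 3, 4, { 5 } }
+--
+-- where the optional second argument limits the nesting depth that gets expanded.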
+
+local function unnest(t,f) -- only used in mk, for old times sake
+ if not f then -- and only relevant for token lists
+ f = { } -- this one can become obsolete
+ end
+ for i=1,#t do
+ local v = t[i]
+ if type(v) == "table" then
+ if type(v[1]) == "table" then
+ unnest(v,f)
+ else
+ f[#f+1] = v
+ end
+ else
+ f[#f+1] = v
+ end
+ end
+ return f
+end
+
+function table.unnest(t) -- bad name
+ return unnest(t)
+end
+
+local function are_equal(a,b,n,m) -- indexed
+ if a and b and #a == #b then
+ n = n or 1
+ m = m or #a
+ for i=n,m do
+ local ai, bi = a[i], b[i]
+ if ai==bi then
+ -- same
+ elseif type(ai) == "table" and type(bi) == "table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+
+local function identical(a,b) -- assumes same structure
+ for ka, va in next, a do
+ local vb = b[ka]
+ if va == vb then
+ -- same
+ elseif type(va) == "table" and type(vb) == "table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+end
+
+table.identical = identical
+table.are_equal = are_equal
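+
+-- Roughly: are_equal compares the indexed parts (optionally only the slice n..m),
+-- while identical walks the keys of a and checks them against b, so (made-up data)
+-- identical({ x = 1 },{ x = 1, y = 2 }) is true but with the arguments swapped it
+-- is false.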
+
+local function sparse(old,nest,keeptables)
+ local new = { }
+ for k, v in next, old do
+ if not (v == "" or v == false) then
+ if nest and type(v) == "table" then
+ v = sparse(v,nest)
+ if keeptables or next(v) ~= nil then
+ new[k] = v
+ end
+ else
+ new[k] = v
+ end
+ end
+ end
+ return new
+end
+
+table.sparse = sparse
+
+function table.compact(t)
+ return sparse(t,true,true)
+end
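+
+-- For example (made-up data): table.sparse { a = "x", b = "", c = false } drops
+-- the b and c entries; table.compact does the same recursively and also keeps
+-- subtables that end up empty.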
+
+function table.contains(t, v)
+ if t then
+ for i=1, #t do
+ if t[i] == v then
+ return i
+ end
+ end
+ end
+ return false
+end
+
+function table.count(t)
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ end
+ return n
+end
+
+function table.swapped(t,s) -- hash
+ local n = { }
+ if s then
+ for k, v in next, s do
+ n[k] = v
+ end
+ end
+ for k, v in next, t do
+ n[v] = k
+ end
+ return n
+end
+
+function table.mirrored(t) -- hash
+ local n = { }
+ for k, v in next, t do
+ n[v] = k
+ n[k] = v
+ end
+ return n
+end
+
+function table.reversed(t)
+ if t then
+ local tt, tn = { }, #t
+ if tn > 0 then
+ local ttn = 0
+ for i=tn,1,-1 do
+ ttn = ttn + 1
+ tt[ttn] = t[i]
+ end
+ end
+ return tt
+ end
+end
+
+function table.reverse(t)
+ if t then
+ local n = #t
+ for i=1,floor(n/2) do
+ local j = n - i + 1
+ t[i], t[j] = t[j], t[i]
+ end
+ return t
+ end
+end
+
+function table.sequenced(t,sep,simple) -- hash only
+ if not t then
+ return ""
+ end
+ local n = #t
+ local s = { }
+ if n > 0 then
+ -- indexed
+ for i=1,n do
+ s[i] = tostring(t[i])
+ end
+ else
+ -- hashed
+ n = 0
+ for k, v in sortedhash(t) do
+ if simple then
+ if v == true then
+ n = n + 1
+ s[n] = k
+ elseif v and v~= "" then
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ else
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
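+
+-- For example (made-up data):
+--
+--   table.sequenced({ a = 1, b = true })           -- "a=1 | b=true"
+--   table.sequenced({ a = 1, b = true },", ",true) -- "a=1, b"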
+
+function table.print(t,...)
+ if type(t) ~= "table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+
+if setinspector then
+ setinspector("table",function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
+end
+
+-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
+
+-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
+
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+
+-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
+
+function table.is_empty(t)
+ return not t or next(t) == nil
+end
+
+function table.has_one_entry(t)
+ return t and next(t,next(t)) == nil
+end
+
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
+
+function table.sorted(t,...)
+ sort(t,...)
+ return t -- still sorts in-place
+end
+
+--
+
+function table.values(t,s) -- optional sort flag
+ if t then
+ local values, keys, v = { }, { }, 0
+ for key, value in next, t do
+ if not keys[value] then
+ v = v + 1
+ values[v] = value
+                keys[value] = key
+ end
+ end
+ if s then
+ sort(values)
+ end
+ return values
+ else
+ return { }
+ end
+end
+
+-- maybe this will move to util-tab.lua
+
+-- for k, v in table.filtered(t,pattern) do ... end
+-- for k, v in table.filtered(t,pattern,true) do ... end
+-- for k, v in table.filtered(t,pattern,true,cmp) do ... end
+
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern) == "string" then
+ if sort then
+ local s
+ if cmp then
+ -- it would be nice if the sort function would accept a third argument (or nicer, an optional first)
+ s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s = sortedkeys(t) -- the robust one
+ end
+ local n = 0
+ local m = #s
+ local function kv(s)
+ while n < m do
+ n = n + 1
+ local k = s[n]
+ if find(k,pattern) then
+ return k, t[k]
+ end
+ end
+ end
+ return kv, s
+ else
+ local n = next(t)
+ local function iterator()
+ while n ~= nil do
+ local k = n
+ n = next(t,k)
+ if find(k,pattern) then
+ return k, t[k]
+ end
+ end
+ end
+ return iterator, t
+ end
+ else
+ return nothing
+ end
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.lua
new file mode 100644
index 00000000000..1ea8c1fd12a
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.lua
@@ -0,0 +1,45 @@
+if not modules then modules = { } end modules ['luatex-languages'] = {
+ version = 1.001,
+ comment = "companion to luatex-languages.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We borrow from ConTeXt.
+
+languages = languages or { }
+
+local loaded = { }
+
+function languages.loadpatterns(tag)
+ if not loaded[tag] then
+ loaded[tag] = 0
+ local filename = kpse.find_file("lang-" .. tag .. ".lua")
+        if not filename or filename == "" then
+ print("<unknown language file for: " .. tag .. ">")
+ else
+ local whatever = loadfile(filename)
+ if type(whatever) == "function" then
+ whatever = whatever()
+ if type(whatever) == "table" then
+ local characters = whatever.patterns.characters or ""
+ local patterns = whatever.patterns.data or ""
+ local exceptions = whatever.exceptions.data or ""
+ local language = lang.new()
+ for b in string.utfvalues(characters) do
+ tex.setlccode(b,b)
+ end
+ lang.patterns(language, patterns)
+ lang.hyphenation(language, exceptions)
+ loaded[tag] = lang.id(language)
+ else
+ print("<invalid language table: " .. tag .. ">")
+ end
+ else
+ print("<invalid language file: " .. tag .. ">")
+ end
+ end
+ end
+ return loaded[tag]
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.tex
new file mode 100644
index 00000000000..9778da39a44
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-languages.tex
@@ -0,0 +1,17 @@
+%D \module
+%D [ file=luatex-fonts,
+%D version=2009.12.01,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Generic \OPENTYPE\ Font Handler,
+%D author=Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D Cf. discussion on \CONTEXT\ list:
+
+\directlua {
+ dofile(kpse.find_file("luatex-languages.lua","tex"))
+}
+
+\def\loadpatterns#1{\directlua{tex.language = languages.loadpatterns("#1")}}
+
+\endinput
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.lua
new file mode 100644
index 00000000000..c316182ba32
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.lua
@@ -0,0 +1,53 @@
+if not modules then modules = { } end modules ['luatex-math'] = {
+ version = 1.001,
+ comment = "companion to luatex-math.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local gaps = {
+ [0x1D455] = 0x0210E,
+ [0x1D49D] = 0x0212C,
+ [0x1D4A0] = 0x02130,
+ [0x1D4A1] = 0x02131,
+ [0x1D4A3] = 0x0210B,
+ [0x1D4A4] = 0x02110,
+ [0x1D4A7] = 0x02112,
+ [0x1D4A8] = 0x02133,
+ [0x1D4AD] = 0x0211B,
+ [0x1D4BA] = 0x0212F,
+ [0x1D4BC] = 0x0210A,
+ [0x1D4C4] = 0x02134,
+ [0x1D506] = 0x0212D,
+ [0x1D50B] = 0x0210C,
+ [0x1D50C] = 0x02111,
+ [0x1D515] = 0x0211C,
+ [0x1D51D] = 0x02128,
+ [0x1D53A] = 0x02102,
+ [0x1D53F] = 0x0210D,
+ [0x1D545] = 0x02115,
+ [0x1D547] = 0x02119,
+ [0x1D548] = 0x0211A,
+ [0x1D549] = 0x0211D,
+ [0x1D551] = 0x02124,
+}
+
+
+local function fixmath(tfmdata,key,value)
+ if value then
+ local characters = tfmdata.characters
+ for gap, mess in pairs(gaps) do
+ characters[gap] = characters[mess]
+ end
+ end
+end
+
+fonts.handlers.otf.features.register {
+ name = "fixmath",
+ description = "math font fixing",
+ manipulators = {
+ base = fixmath,
+ node = fixmath,
+ }
+}
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.tex
new file mode 100644
index 00000000000..604b4a1f8d9
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-math.tex
@@ -0,0 +1,1874 @@
+%D \module
+%D [ file=luatex-math,
+%D version=2013.04.29,
+%D title=\LUATEX\ Support Macros,
+%D       subtitle=An example of math,
+%D author=Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D This module is in no way a complete plain math implementation. I made this file
+%D because I needed it for a tutorial for (mostly) plain \TEX\ users. There are
+%D several ways to support math in \LUATEX, and this is just one of them. It was the
+%D quickest hack I could come up with and it stays somewhat close to the traditional
+%D approach (and thereby far from the \CONTEXT\ way). This file is mainly meant for
+%D Boguslaw Jackowski.
+
+% we provide a remap feature
+
+\directlua{dofile(kpse.find_file('luatex-math.lua'))}
+
+% a bunch of fonts:
+
+\let \teni = \relax
+\let \seveni = \relax
+\let \fivei = \relax
+\let \tensy = \relax
+\let \sevensy = \relax
+\let \fivesy = \relax
+\let \tenex = \relax
+\let \sevenbf = \relax
+\let \fivebf = \relax
+
+\def\latinmodern
+ {\font\tenrm = file:lmroman10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\sevenrm = file:lmroman7-regular.otf:+liga;+kern;+tlig;+trep at 7pt
+ \font\fiverm = file:lmroman5-regular.otf:+liga;+kern;+tlig;+trep at 5pt
+ %
+ \font\tentt = file:lmmono10-regular.otf at 10pt
+ \font\tensl = file:lmromanslant10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenit = file:lmroman10-italic.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbf = file:lmroman10-bold.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbi = file:lmroman10-bolditalic.otf:+liga;+kern;+tlig;+trep at 10pt
+ %
+ \font\mathfonttextupright = file:latinmodern-math.otf:ssty=0;fixmath=yes at 10pt
+ \font\mathfontscriptupright = file:latinmodern-math.otf:ssty=1;fixmath=yes at 7pt
+ \font\mathfontscriptscriptupright = file:latinmodern-math.otf:ssty=2;fixmath=yes at 5pt
+ %
+ \textfont 0 = \mathfonttextupright
+ \scriptfont 0 = \mathfontscriptupright
+ \scriptscriptfont 0 = \mathfontscriptscriptupright
+ %
+ \tenrm}
+
+\def\lucidabright
+ {\font\tenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\sevenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 7pt
+ \font\fiverm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 5pt
+ %
+ \font\tentt = file:lucidabrightot.otf at 10pt
+ \font\tenit = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenit = file:lucidabrightot-italic.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbf = file:lucidabrightot-demi.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbi = file:lucidabrightot-demiitalic.otf:+liga;+kern;+tlig;+trep at 10pt
+ %
+ \font\mathfonttextupright = file:lucidabrightmathot.otf:ssty=0;fixmath=yes at 10pt
+ \font\mathfontscriptupright = file:lucidabrightmathot.otf:ssty=1;fixmath=yes at 7pt
+ \font\mathfontscriptscriptupright = file:lucidabrightmathot.otf:ssty=2;fixmath=yes at 5pt
+ %
+ \textfont 0 = \mathfonttextupright
+ \scriptfont 0 = \mathfontscriptupright
+ \scriptscriptfont 0 = \mathfontscriptscriptupright
+ %
+ \tenrm}
+
+\directlua {
+ if arguments["mtx:lucidabright"] then
+ tex.print("\string\\lucidabright")
+ else
+ tex.print("\string\\latinmodern")
+ end
+}
+
+\newtoks\everymathrm
+\newtoks\everymathmit
+\newtoks\everymathcal
+\newtoks\everymathit
+\newtoks\everymathsl
+\newtoks\everymathbf
+\newtoks\everymathbi
+\newtoks\everymathtt
+
+\def\rm{\fam0\relax\the\everymathrm\relax\tenrm\relax}
+\def\it{\fam0\relax\the\everymathit\relax\tenit\relax}
+\def\sl{\fam0\relax\the\everymathsl\relax\tensl\relax}
+\def\bf{\fam0\relax\the\everymathbf\relax\tenbf\relax}
+\def\bi{\fam0\relax\the\everymathbi\relax\tenbi\relax}
+\def\tt{\fam0\relax\the\everymathtt\relax\tentt\relax}
+
+\let\mit \relax % use names or \Uchar or define a vector
+\let\cal \relax % idem, i'm not in the mood for this now
+\let\oldstyle\relax % no longer misuse of math mode
+
+% tex is fast enough for this kind of assignment:
+
+\everymathrm {%
+ \Umathcode"0041="0"0"0041%
+ \Umathcode"0042="0"0"0042%
+ \Umathcode"0043="0"0"0043%
+ \Umathcode"0044="0"0"0044%
+ \Umathcode"0045="0"0"0045%
+ \Umathcode"0046="0"0"0046%
+ \Umathcode"0047="0"0"0047%
+ \Umathcode"0048="0"0"0048%
+ \Umathcode"0049="0"0"0049%
+ \Umathcode"004A="0"0"004A%
+ \Umathcode"004B="0"0"004B%
+ \Umathcode"004C="0"0"004C%
+ \Umathcode"004D="0"0"004D%
+ \Umathcode"004E="0"0"004E%
+ \Umathcode"004F="0"0"004F%
+ \Umathcode"0050="0"0"0050%
+ \Umathcode"0051="0"0"0051%
+ \Umathcode"0052="0"0"0052%
+ \Umathcode"0053="0"0"0053%
+ \Umathcode"0054="0"0"0054%
+ \Umathcode"0055="0"0"0055%
+ \Umathcode"0056="0"0"0056%
+ \Umathcode"0057="0"0"0057%
+ \Umathcode"0058="0"0"0058%
+ \Umathcode"0059="0"0"0059%
+ \Umathcode"005A="0"0"005A%
+ \Umathcode"0061="0"0"0061%
+ \Umathcode"0062="0"0"0062%
+ \Umathcode"0063="0"0"0063%
+ \Umathcode"0064="0"0"0064%
+ \Umathcode"0065="0"0"0065%
+ \Umathcode"0066="0"0"0066%
+ \Umathcode"0067="0"0"0067%
+ \Umathcode"0068="0"0"0068%
+ \Umathcode"0069="0"0"0069%
+ \Umathcode"006A="0"0"006A%
+ \Umathcode"006B="0"0"006B%
+ \Umathcode"006C="0"0"006C%
+ \Umathcode"006D="0"0"006D%
+ \Umathcode"006E="0"0"006E%
+ \Umathcode"006F="0"0"006F%
+ \Umathcode"0070="0"0"0070%
+ \Umathcode"0071="0"0"0071%
+ \Umathcode"0072="0"0"0072%
+ \Umathcode"0073="0"0"0073%
+ \Umathcode"0074="0"0"0074%
+ \Umathcode"0075="0"0"0075%
+ \Umathcode"0076="0"0"0076%
+ \Umathcode"0077="0"0"0077%
+ \Umathcode"0078="0"0"0078%
+ \Umathcode"0079="0"0"0079%
+ \Umathcode"007A="0"0"007A%
+ \Umathcode"0391="0"0"0391%
+ \Umathcode"0392="0"0"0392%
+ \Umathcode"0393="0"0"0393%
+ \Umathcode"0394="0"0"0394%
+ \Umathcode"0395="0"0"0395%
+ \Umathcode"0396="0"0"0396%
+ \Umathcode"0397="0"0"0397%
+ \Umathcode"0398="0"0"0398%
+ \Umathcode"0399="0"0"0399%
+ \Umathcode"039A="0"0"039A%
+ \Umathcode"039B="0"0"039B%
+ \Umathcode"039C="0"0"039C%
+ \Umathcode"039D="0"0"039D%
+ \Umathcode"039E="0"0"039E%
+ \Umathcode"039F="0"0"039F%
+ \Umathcode"03A0="0"0"03A0%
+ \Umathcode"03A1="0"0"03A1%
+ \Umathcode"03A3="0"0"03A3%
+ \Umathcode"03A4="0"0"03A4%
+ \Umathcode"03A5="0"0"03A5%
+ \Umathcode"03A6="0"0"03A6%
+ \Umathcode"03A7="0"0"03A7%
+ \Umathcode"03A8="0"0"03A8%
+ \Umathcode"03A9="0"0"03A9%
+ \Umathcode"03B1="0"0"03B1%
+ \Umathcode"03B2="0"0"03B2%
+ \Umathcode"03B3="0"0"03B3%
+ \Umathcode"03B4="0"0"03B4%
+ \Umathcode"03B5="0"0"03B5%
+ \Umathcode"03B6="0"0"03B6%
+ \Umathcode"03B7="0"0"03B7%
+ \Umathcode"03B8="0"0"03B8%
+ \Umathcode"03B9="0"0"03B9%
+ \Umathcode"03BA="0"0"03BA%
+ \Umathcode"03BB="0"0"03BB%
+ \Umathcode"03BC="0"0"03BC%
+ \Umathcode"03BD="0"0"03BD%
+ \Umathcode"03BE="0"0"03BE%
+ \Umathcode"03BF="0"0"03BF%
+ \Umathcode"03C0="0"0"03C0%
+ \Umathcode"03C1="0"0"03C1%
+ \Umathcode"03C2="0"0"03C2%
+ \Umathcode"03C3="0"0"03C3%
+ \Umathcode"03C4="0"0"03C4%
+ \Umathcode"03C5="0"0"03C5%
+ \Umathcode"03C6="0"0"03C6%
+ \Umathcode"03C7="0"0"03C7%
+ \Umathcode"03C8="0"0"03C8%
+ \Umathcode"03C9="0"0"03C9%
+ \Umathcode"03D1="0"0"03D1%
+ \Umathcode"03D5="0"0"03D5%
+ \Umathcode"03D6="0"0"03D6%
+ \Umathcode"03F0="0"0"03F0%
+ \Umathcode"03F1="0"0"03F1%
+ \Umathcode"03F4="0"0"03F4%
+ \Umathcode"03F5="0"0"03F5%
+ \Umathcode"2202="0"0"2202%
+ \Umathcode"2207="0"0"2207%
+ \relax
+}
+
+\everymathmit {%
+ % not done
+}
+
+\everymathcal {%
+ % not done
+}
+
+\everymathit {%
+ \Umathcode"0041="0"0"1D434%
+ \Umathcode"0042="0"0"1D435%
+ \Umathcode"0043="0"0"1D436%
+ \Umathcode"0044="0"0"1D437%
+ \Umathcode"0045="0"0"1D438%
+ \Umathcode"0046="0"0"1D439%
+ \Umathcode"0047="0"0"1D43A%
+ \Umathcode"0048="0"0"1D43B%
+ \Umathcode"0049="0"0"1D43C%
+ \Umathcode"004A="0"0"1D43D%
+ \Umathcode"004B="0"0"1D43E%
+ \Umathcode"004C="0"0"1D43F%
+ \Umathcode"004D="0"0"1D440%
+ \Umathcode"004E="0"0"1D441%
+ \Umathcode"004F="0"0"1D442%
+ \Umathcode"0050="0"0"1D443%
+ \Umathcode"0051="0"0"1D444%
+ \Umathcode"0052="0"0"1D445%
+ \Umathcode"0053="0"0"1D446%
+ \Umathcode"0054="0"0"1D447%
+ \Umathcode"0055="0"0"1D448%
+ \Umathcode"0056="0"0"1D449%
+ \Umathcode"0057="0"0"1D44A%
+ \Umathcode"0058="0"0"1D44B%
+ \Umathcode"0059="0"0"1D44C%
+ \Umathcode"005A="0"0"1D44D%
+ \Umathcode"0061="0"0"1D44E%
+ \Umathcode"0062="0"0"1D44F%
+ \Umathcode"0063="0"0"1D450%
+ \Umathcode"0064="0"0"1D451%
+ \Umathcode"0065="0"0"1D452%
+ \Umathcode"0066="0"0"1D453%
+ \Umathcode"0067="0"0"1D454%
+ \Umathcode"0068="0"0"0210E%
+ \Umathcode"0069="0"0"1D456%
+ \Umathcode"006A="0"0"1D457%
+ \Umathcode"006B="0"0"1D458%
+ \Umathcode"006C="0"0"1D459%
+ \Umathcode"006D="0"0"1D45A%
+ \Umathcode"006E="0"0"1D45B%
+ \Umathcode"006F="0"0"1D45C%
+ \Umathcode"0070="0"0"1D45D%
+ \Umathcode"0071="0"0"1D45E%
+ \Umathcode"0072="0"0"1D45F%
+ \Umathcode"0073="0"0"1D460%
+ \Umathcode"0074="0"0"1D461%
+ \Umathcode"0075="0"0"1D462%
+ \Umathcode"0076="0"0"1D463%
+ \Umathcode"0077="0"0"1D464%
+ \Umathcode"0078="0"0"1D465%
+ \Umathcode"0079="0"0"1D466%
+ \Umathcode"007A="0"0"1D467%
+ \Umathcode"0391="0"0"1D6E2%
+ \Umathcode"0392="0"0"1D6E3%
+ \Umathcode"0393="0"0"1D6E4%
+ \Umathcode"0394="0"0"1D6E5%
+ \Umathcode"0395="0"0"1D6E6%
+ \Umathcode"0396="0"0"1D6E7%
+ \Umathcode"0397="0"0"1D6E8%
+ \Umathcode"0398="0"0"1D6E9%
+ \Umathcode"0399="0"0"1D6EA%
+ \Umathcode"039A="0"0"1D6EB%
+ \Umathcode"039B="0"0"1D6EC%
+ \Umathcode"039C="0"0"1D6ED%
+ \Umathcode"039D="0"0"1D6EE%
+ \Umathcode"039E="0"0"1D6EF%
+ \Umathcode"039F="0"0"1D6F0%
+ \Umathcode"03A0="0"0"1D6F1%
+ \Umathcode"03A1="0"0"1D6F2%
+ \Umathcode"03A3="0"0"1D6F4%
+ \Umathcode"03A4="0"0"1D6F5%
+ \Umathcode"03A5="0"0"1D6F6%
+ \Umathcode"03A6="0"0"1D6F7%
+ \Umathcode"03A7="0"0"1D6F8%
+ \Umathcode"03A8="0"0"1D6F9%
+ \Umathcode"03A9="0"0"1D6FA%
+ \Umathcode"03B1="0"0"1D6FC%
+ \Umathcode"03B2="0"0"1D6FD%
+ \Umathcode"03B3="0"0"1D6FE%
+ \Umathcode"03B4="0"0"1D6FF%
+ \Umathcode"03B5="0"0"1D700%
+ \Umathcode"03B6="0"0"1D701%
+ \Umathcode"03B7="0"0"1D702%
+ \Umathcode"03B8="0"0"1D703%
+ \Umathcode"03B9="0"0"1D704%
+ \Umathcode"03BA="0"0"1D705%
+ \Umathcode"03BB="0"0"1D706%
+ \Umathcode"03BC="0"0"1D707%
+ \Umathcode"03BD="0"0"1D708%
+ \Umathcode"03BE="0"0"1D709%
+ \Umathcode"03BF="0"0"1D70A%
+ \Umathcode"03C0="0"0"1D70B%
+ \Umathcode"03C1="0"0"1D70C%
+ \Umathcode"03C2="0"0"1D70D%
+ \Umathcode"03C3="0"0"1D70E%
+ \Umathcode"03C4="0"0"1D70F%
+ \Umathcode"03C5="0"0"1D710%
+ \Umathcode"03C6="0"0"1D711%
+ \Umathcode"03C7="0"0"1D712%
+ \Umathcode"03C8="0"0"1D713%
+ \Umathcode"03C9="0"0"1D714%
+ \Umathcode"03D1="0"0"1D717%
+ \Umathcode"03D5="0"0"1D719%
+ \Umathcode"03D6="0"0"1D71B%
+ \Umathcode"03F0="0"0"1D718%
+ \Umathcode"03F1="0"0"1D71A%
+ \Umathcode"03F4="0"0"1D6F3%
+ \Umathcode"03F5="0"0"1D716%
+ \Umathcode"2202="0"0"1D715%
+ \Umathcode"2207="0"0"1D6FB%
+ \relax
+}
+
+\everymathsl {%
+ \the\everymathit
+}
+
+\everymathbf {%
+ \Umathcode"0030="0"0"1D7CE%
+ \Umathcode"0031="0"0"1D7CF%
+ \Umathcode"0032="0"0"1D7D0%
+ \Umathcode"0033="0"0"1D7D1%
+ \Umathcode"0034="0"0"1D7D2%
+ \Umathcode"0035="0"0"1D7D3%
+ \Umathcode"0036="0"0"1D7D4%
+ \Umathcode"0037="0"0"1D7D5%
+ \Umathcode"0038="0"0"1D7D6%
+ \Umathcode"0039="0"0"1D7D7%
+ \Umathcode"0041="0"0"1D400%
+ \Umathcode"0042="0"0"1D401%
+ \Umathcode"0043="0"0"1D402%
+ \Umathcode"0044="0"0"1D403%
+ \Umathcode"0045="0"0"1D404%
+ \Umathcode"0046="0"0"1D405%
+ \Umathcode"0047="0"0"1D406%
+ \Umathcode"0048="0"0"1D407%
+ \Umathcode"0049="0"0"1D408%
+ \Umathcode"004A="0"0"1D409%
+ \Umathcode"004B="0"0"1D40A%
+ \Umathcode"004C="0"0"1D40B%
+ \Umathcode"004D="0"0"1D40C%
+ \Umathcode"004E="0"0"1D40D%
+ \Umathcode"004F="0"0"1D40E%
+ \Umathcode"0050="0"0"1D40F%
+ \Umathcode"0051="0"0"1D410%
+ \Umathcode"0052="0"0"1D411%
+ \Umathcode"0053="0"0"1D412%
+ \Umathcode"0054="0"0"1D413%
+ \Umathcode"0055="0"0"1D414%
+ \Umathcode"0056="0"0"1D415%
+ \Umathcode"0057="0"0"1D416%
+ \Umathcode"0058="0"0"1D417%
+ \Umathcode"0059="0"0"1D418%
+ \Umathcode"005A="0"0"1D419%
+ \Umathcode"0061="0"0"1D41A%
+ \Umathcode"0062="0"0"1D41B%
+ \Umathcode"0063="0"0"1D41C%
+ \Umathcode"0064="0"0"1D41D%
+ \Umathcode"0065="0"0"1D41E%
+ \Umathcode"0066="0"0"1D41F%
+ \Umathcode"0067="0"0"1D420%
+ \Umathcode"0068="0"0"1D421%
+ \Umathcode"0069="0"0"1D422%
+ \Umathcode"006A="0"0"1D423%
+ \Umathcode"006B="0"0"1D424%
+ \Umathcode"006C="0"0"1D425%
+ \Umathcode"006D="0"0"1D426%
+ \Umathcode"006E="0"0"1D427%
+ \Umathcode"006F="0"0"1D428%
+ \Umathcode"0070="0"0"1D429%
+ \Umathcode"0071="0"0"1D42A%
+ \Umathcode"0072="0"0"1D42B%
+ \Umathcode"0073="0"0"1D42C%
+ \Umathcode"0074="0"0"1D42D%
+ \Umathcode"0075="0"0"1D42E%
+ \Umathcode"0076="0"0"1D42F%
+ \Umathcode"0077="0"0"1D430%
+ \Umathcode"0078="0"0"1D431%
+ \Umathcode"0079="0"0"1D432%
+ \Umathcode"007A="0"0"1D433%
+ \Umathcode"0391="0"0"1D6A8%
+ \Umathcode"0392="0"0"1D6A9%
+ \Umathcode"0393="0"0"1D6AA%
+ \Umathcode"0394="0"0"1D6AB%
+ \Umathcode"0395="0"0"1D6AC%
+ \Umathcode"0396="0"0"1D6AD%
+ \Umathcode"0397="0"0"1D6AE%
+ \Umathcode"0398="0"0"1D6AF%
+ \Umathcode"0399="0"0"1D6B0%
+ \Umathcode"039A="0"0"1D6B1%
+ \Umathcode"039B="0"0"1D6B2%
+ \Umathcode"039C="0"0"1D6B3%
+ \Umathcode"039D="0"0"1D6B4%
+ \Umathcode"039E="0"0"1D6B5%
+ \Umathcode"039F="0"0"1D6B6%
+ \Umathcode"03A0="0"0"1D6B7%
+ \Umathcode"03A1="0"0"1D6B8%
+ \Umathcode"03A3="0"0"1D6BA%
+ \Umathcode"03A4="0"0"1D6BB%
+ \Umathcode"03A5="0"0"1D6BC%
+ \Umathcode"03A6="0"0"1D6BD%
+ \Umathcode"03A7="0"0"1D6BE%
+ \Umathcode"03A8="0"0"1D6BF%
+ \Umathcode"03A9="0"0"1D6C0%
+ \Umathcode"03B1="0"0"1D6C2%
+ \Umathcode"03B2="0"0"1D6C3%
+ \Umathcode"03B3="0"0"1D6C4%
+ \Umathcode"03B4="0"0"1D6C5%
+ \Umathcode"03B5="0"0"1D6C6%
+ \Umathcode"03B6="0"0"1D6C7%
+ \Umathcode"03B7="0"0"1D6C8%
+ \Umathcode"03B8="0"0"1D6C9%
+ \Umathcode"03B9="0"0"1D6CA%
+ \Umathcode"03BA="0"0"1D6CB%
+ \Umathcode"03BB="0"0"1D6CC%
+ \Umathcode"03BC="0"0"1D6CD%
+ \Umathcode"03BD="0"0"1D6CE%
+ \Umathcode"03BE="0"0"1D6CF%
+ \Umathcode"03BF="0"0"1D6D0%
+ \Umathcode"03C0="0"0"1D6D1%
+ \Umathcode"03C1="0"0"1D6D2%
+ \Umathcode"03C2="0"0"1D6D3%
+ \Umathcode"03C3="0"0"1D6D4%
+ \Umathcode"03C4="0"0"1D6D5%
+ \Umathcode"03C5="0"0"1D6D6%
+ \Umathcode"03C6="0"0"1D6D7%
+ \Umathcode"03C7="0"0"1D6D8%
+ \Umathcode"03C8="0"0"1D6D9%
+ \Umathcode"03C9="0"0"1D6DA%
+ \Umathcode"03D1="0"0"1D6DD%
+ \Umathcode"03D5="0"0"1D6DF%
+ \Umathcode"03D6="0"0"1D6E1%
+ \Umathcode"03F0="0"0"1D6DE%
+ \Umathcode"03F1="0"0"1D6E0%
+ \Umathcode"03F4="0"0"1D6B9%
+ \Umathcode"03F5="0"0"1D6DC%
+ \Umathcode"2202="0"0"1D6DB%
+ \Umathcode"2207="0"0"1D6C1%
+ \relax
+}
+
+\everymathbi {%
+ \Umathcode"0030="0"0"1D7CE%
+ \Umathcode"0031="0"0"1D7CF%
+ \Umathcode"0032="0"0"1D7D0%
+ \Umathcode"0033="0"0"1D7D1%
+ \Umathcode"0034="0"0"1D7D2%
+ \Umathcode"0035="0"0"1D7D3%
+ \Umathcode"0036="0"0"1D7D4%
+ \Umathcode"0037="0"0"1D7D5%
+ \Umathcode"0038="0"0"1D7D6%
+ \Umathcode"0039="0"0"1D7D7%
+ \Umathcode"0041="0"0"1D468%
+ \Umathcode"0042="0"0"1D469%
+ \Umathcode"0043="0"0"1D46A%
+ \Umathcode"0044="0"0"1D46B%
+ \Umathcode"0045="0"0"1D46C%
+ \Umathcode"0046="0"0"1D46D%
+ \Umathcode"0047="0"0"1D46E%
+ \Umathcode"0048="0"0"1D46F%
+ \Umathcode"0049="0"0"1D470%
+ \Umathcode"004A="0"0"1D471%
+ \Umathcode"004B="0"0"1D472%
+ \Umathcode"004C="0"0"1D473%
+ \Umathcode"004D="0"0"1D474%
+ \Umathcode"004E="0"0"1D475%
+ \Umathcode"004F="0"0"1D476%
+ \Umathcode"0050="0"0"1D477%
+ \Umathcode"0051="0"0"1D478%
+ \Umathcode"0052="0"0"1D479%
+ \Umathcode"0053="0"0"1D47A%
+ \Umathcode"0054="0"0"1D47B%
+ \Umathcode"0055="0"0"1D47C%
+ \Umathcode"0056="0"0"1D47D%
+ \Umathcode"0057="0"0"1D47E%
+ \Umathcode"0058="0"0"1D47F%
+ \Umathcode"0059="0"0"1D480%
+ \Umathcode"005A="0"0"1D481%
+ \Umathcode"0061="0"0"1D482%
+ \Umathcode"0062="0"0"1D483%
+ \Umathcode"0063="0"0"1D484%
+ \Umathcode"0064="0"0"1D485%
+ \Umathcode"0065="0"0"1D486%
+ \Umathcode"0066="0"0"1D487%
+ \Umathcode"0067="0"0"1D488%
+ \Umathcode"0068="0"0"1D489%
+ \Umathcode"0069="0"0"1D48A%
+ \Umathcode"006A="0"0"1D48B%
+ \Umathcode"006B="0"0"1D48C%
+ \Umathcode"006C="0"0"1D48D%
+ \Umathcode"006D="0"0"1D48E%
+ \Umathcode"006E="0"0"1D48F%
+ \Umathcode"006F="0"0"1D490%
+ \Umathcode"0070="0"0"1D491%
+ \Umathcode"0071="0"0"1D492%
+ \Umathcode"0072="0"0"1D493%
+ \Umathcode"0073="0"0"1D494%
+ \Umathcode"0074="0"0"1D495%
+ \Umathcode"0075="0"0"1D496%
+ \Umathcode"0076="0"0"1D497%
+ \Umathcode"0077="0"0"1D498%
+ \Umathcode"0078="0"0"1D499%
+ \Umathcode"0079="0"0"1D49A%
+ \Umathcode"007A="0"0"1D49B%
+ \Umathcode"0391="0"0"1D71C%
+ \Umathcode"0392="0"0"1D71D%
+ \Umathcode"0393="0"0"1D71E%
+ \Umathcode"0394="0"0"1D71F%
+ \Umathcode"0395="0"0"1D720%
+ \Umathcode"0396="0"0"1D721%
+ \Umathcode"0397="0"0"1D722%
+ \Umathcode"0398="0"0"1D723%
+ \Umathcode"0399="0"0"1D724%
+ \Umathcode"039A="0"0"1D725%
+ \Umathcode"039B="0"0"1D726%
+ \Umathcode"039C="0"0"1D727%
+ \Umathcode"039D="0"0"1D728%
+ \Umathcode"039E="0"0"1D729%
+ \Umathcode"039F="0"0"1D72A%
+ \Umathcode"03A0="0"0"1D72B%
+ \Umathcode"03A1="0"0"1D72C%
+ \Umathcode"03A3="0"0"1D72E%
+ \Umathcode"03A4="0"0"1D72F%
+ \Umathcode"03A5="0"0"1D730%
+ \Umathcode"03A6="0"0"1D731%
+ \Umathcode"03A7="0"0"1D732%
+ \Umathcode"03A8="0"0"1D733%
+ \Umathcode"03A9="0"0"1D734%
+ \Umathcode"03B1="0"0"1D736%
+ \Umathcode"03B2="0"0"1D737%
+ \Umathcode"03B3="0"0"1D738%
+ \Umathcode"03B4="0"0"1D739%
+ \Umathcode"03B5="0"0"1D73A%
+ \Umathcode"03B6="0"0"1D73B%
+ \Umathcode"03B7="0"0"1D73C%
+ \Umathcode"03B8="0"0"1D73D%
+ \Umathcode"03B9="0"0"1D73E%
+ \Umathcode"03BA="0"0"1D73F%
+ \Umathcode"03BB="0"0"1D740%
+ \Umathcode"03BC="0"0"1D741%
+ \Umathcode"03BD="0"0"1D742%
+ \Umathcode"03BE="0"0"1D743%
+ \Umathcode"03BF="0"0"1D744%
+ \Umathcode"03C0="0"0"1D745%
+ \Umathcode"03C1="0"0"1D746%
+ \Umathcode"03C2="0"0"1D747%
+ \Umathcode"03C3="0"0"1D748%
+ \Umathcode"03C4="0"0"1D749%
+ \Umathcode"03C5="0"0"1D74A%
+ \Umathcode"03C6="0"0"1D74B%
+ \Umathcode"03C7="0"0"1D74C%
+ \Umathcode"03C8="0"0"1D74D%
+ \Umathcode"03C9="0"0"1D74E%
+ \Umathcode"03D1="0"0"1D751%
+ \Umathcode"03D5="0"0"1D753%
+ \Umathcode"03D6="0"0"1D755%
+ \Umathcode"03F0="0"0"1D752%
+ \Umathcode"03F1="0"0"1D754%
+ \Umathcode"03F4="0"0"1D72D%
+ \Umathcode"03F5="0"0"1D750%
+ \Umathcode"2202="0"0"1D74F%
+ \Umathcode"2207="0"0"1D735%
+ \relax
+}
+
+\everymathtt {%
+ % not done
+}
+
+\Udelcode "00021 = "0 "00021
+\Udelcode "00028 = "0 "00028
+\Udelcode "00028 = "0 "00028
+\Udelcode "00029 = "0 "00029
+\Udelcode "00029 = "0 "00029
+\Udelcode "0002F = "0 "0002F
+\Udelcode "0002F = "0 "0002F
+\Udelcode "0002F = "0 "02044
+\Udelcode "0003F = "0 "0003F
+\Udelcode "0005B = "0 "0005B
+\Udelcode "0005B = "0 "0005B
+\Udelcode "0005D = "0 "0005D
+\Udelcode "0005D = "0 "0005D
+\Udelcode "0007B = "0 "0007B
+\Udelcode "0007B = "0 "0007B
+\Udelcode "0007C = "0 "0007C
+\Udelcode "0007C = "0 "0007C
+\Udelcode "0007C = "0 "0007C
+\Udelcode "0007C = "0 "0007C
+\Udelcode "0007C = "0 "0007C
+\Udelcode "0007D = "0 "0007D
+\Udelcode "0007D = "0 "0007D
+\Udelcode "02016 = "0 "02016
+\Udelcode "02016 = "0 "02016
+\Udelcode "02016 = "0 "02016
+\Udelcode "02016 = "0 "02016
+\Udelcode "02016 = "0 "02016
+\Udelcode "02044 = "0 "02044
+\Udelcode "02044 = "0 "02044
+\Udelcode "02308 = "0 "02308
+\Udelcode "02308 = "0 "02308
+\Udelcode "02308 = "0 "02308
+\Udelcode "02308 = "0 "02308
+\Udelcode "02308 = "0 "02308
+\Udelcode "02309 = "0 "02309
+\Udelcode "02309 = "0 "02309
+\Udelcode "02309 = "0 "02309
+\Udelcode "02309 = "0 "02309
+\Udelcode "02309 = "0 "02309
+\Udelcode "0230A = "0 "0230A
+\Udelcode "0230A = "0 "0230A
+\Udelcode "0230B = "0 "0230B
+\Udelcode "0230B = "0 "0230B
+\Udelcode "0231C = "0 "0231C
+\Udelcode "0231C = "0 "0231C
+\Udelcode "0231D = "0 "0231D
+\Udelcode "0231D = "0 "0231D
+\Udelcode "0231E = "0 "0231E
+\Udelcode "0231E = "0 "0231E
+\Udelcode "0231F = "0 "0231F
+\Udelcode "0231F = "0 "0231F
+\Udelcode "023B0 = "0 "023B0
+\Udelcode "023B0 = "0 "023B0
+\Udelcode "023B1 = "0 "023B1
+\Udelcode "023B1 = "0 "023B1
+\Udelcode "027E6 = "0 "027E6
+\Udelcode "027E6 = "0 "027E6
+\Udelcode "027E7 = "0 "027E7
+\Udelcode "027E7 = "0 "027E7
+\Udelcode "027E8 = "0 "027E8
+\Udelcode "027E8 = "0 "027E8
+\Udelcode "027E9 = "0 "027E9
+\Udelcode "027E9 = "0 "027E9
+\Udelcode "027EA = "0 "027EA
+\Udelcode "027EA = "0 "027EA
+\Udelcode "027EB = "0 "027EB
+\Udelcode "027EB = "0 "027EB
+\Udelcode "027EE = "0 "027EE
+\Udelcode "027EE = "0 "027EE
+\Udelcode "027EF = "0 "027EF
+\Udelcode "027EF = "0 "027EF
+
+\Umathcode "00021 = "5 "0 "00021
+\Umathcode "00022 = "0 "0 "00022
+\Umathcode "00027 = "0 "0 "00027
+\Umathcode "00028 = "4 "0 "00028
+\Umathcode "00029 = "5 "0 "00029
+\Umathcode "0002A = "2 "0 "02217
+\Umathcode "0002B = "2 "0 "0002B
+\Umathcode "0002C = "6 "0 "0002C
+\Umathcode "0002D = "2 "0 "02212
+\Umathcode "0002E = "6 "0 "0002E
+\Umathcode "0002F = "4 "0 "02044
+\Umathcode "0003A = "3 "0 "0003A
+\Umathcode "0003B = "6 "0 "0003B
+\Umathcode "0003C = "3 "0 "0003C
+\Umathcode "0003D = "3 "0 "0003D
+\Umathcode "0003E = "3 "0 "0003E
+\Umathcode "0003F = "5 "0 "0003F
+\Umathcode "0005B = "4 "0 "0005B
+\Umathcode "0005C = "0 "0 "0005C
+\Umathcode "0005D = "5 "0 "0005D
+\Umathcode "0007B = "4 "0 "0007B
+\Umathcode "0007C = "0 "0 "0007C
+\Umathcode "0007D = "5 "0 "0007D
+\Umathcode "000A5 = "0 "0 "000A5
+\Umathcode "000A7 = "0 "0 "000A7
+\Umathcode "000AC = "0 "0 "000AC
+\Umathcode "000B1 = "2 "0 "000B1
+\Umathcode "000B6 = "0 "0 "000B6
+\Umathcode "000B7 = "2 "0 "000B7
+\Umathcode "000D7 = "2 "0 "000D7
+\Umathcode "000F0 = "0 "0 "000F0
+\Umathcode "000F7 = "2 "0 "000F7
+\Umathcode "00338 = "3 "0 "00338
+\Umathcode "003F0 = "0 "0 "003F0
+\Umathcode "02016 = "0 "0 "02016
+\Umathcode "02020 = "2 "0 "02020
+\Umathcode "02021 = "2 "0 "02021
+\Umathcode "02022 = "2 "0 "02022
+\Umathcode "02026 = "0 "0 "02026
+\Umathcode "02032 = "0 "0 "02032
+\Umathcode "02033 = "0 "0 "02033
+\Umathcode "02034 = "0 "0 "02034
+\Umathcode "02044 = "0 "0 "02044
+\Umathcode "0207A = "2 "0 "0207A
+\Umathcode "0207B = "2 "0 "0207B
+\Umathcode "020DD = "0 "0 "020DD
+\Umathcode "020DE = "0 "0 "020DE
+\Umathcode "020DF = "0 "0 "020DF
+\Umathcode "02111 = "0 "0 "02111
+\Umathcode "02113 = "0 "0 "02113
+\Umathcode "02118 = "0 "0 "02118
+\Umathcode "0211C = "0 "0 "0211C
+\Umathcode "02132 = "0 "0 "02132
+\Umathcode "02135 = "0 "0 "02135
+\Umathcode "02136 = "0 "0 "02136
+\Umathcode "02137 = "0 "0 "02137
+\Umathcode "02138 = "0 "0 "02138
+\Umathcode "02141 = "0 "0 "02141
+\Umathcode "02142 = "0 "0 "02142
+\Umathcode "02143 = "0 "0 "02143
+\Umathcode "02144 = "0 "0 "02144
+\Umathcode "02145 = "0 "0 "02145
+\Umathcode "02146 = "0 "0 "02146
+\Umathcode "02147 = "0 "0 "02147
+\Umathcode "02148 = "0 "0 "02148
+\Umathcode "02149 = "0 "0 "02149
+\Umathcode "0214A = "0 "0 "0214A
+\Umathcode "0214B = "2 "0 "0214B
+\Umathcode "02190 = "3 "0 "02190
+\Umathcode "02191 = "3 "0 "02191
+\Umathcode "02192 = "3 "0 "02192
+\Umathcode "02193 = "3 "0 "02193
+\Umathcode "02194 = "3 "0 "02194
+\Umathcode "02195 = "3 "0 "02195
+\Umathcode "02196 = "3 "0 "02196
+\Umathcode "02197 = "3 "0 "02197
+\Umathcode "02198 = "3 "0 "02198
+\Umathcode "02199 = "3 "0 "02199
+\Umathcode "0219A = "3 "0 "0219A
+\Umathcode "0219B = "3 "0 "0219B
+\Umathcode "0219C = "3 "0 "0219C
+\Umathcode "0219D = "3 "0 "0219D
+\Umathcode "0219E = "3 "0 "0219E
+\Umathcode "0219F = "3 "0 "0219F
+\Umathcode "021A0 = "3 "0 "021A0
+\Umathcode "021A1 = "3 "0 "021A1
+\Umathcode "021A2 = "3 "0 "021A2
+\Umathcode "021A3 = "3 "0 "021A3
+\Umathcode "021A4 = "3 "0 "021A4
+\Umathcode "021A5 = "3 "0 "021A5
+\Umathcode "021A6 = "3 "0 "021A6
+\Umathcode "021A7 = "3 "0 "021A7
+\Umathcode "021A8 = "0 "0 "021A8
+\Umathcode "021A9 = "3 "0 "021A9
+\Umathcode "021AA = "3 "0 "021AA
+\Umathcode "021AB = "3 "0 "021AB
+\Umathcode "021AC = "3 "0 "021AC
+\Umathcode "021AD = "3 "0 "021AD
+\Umathcode "021AE = "3 "0 "021AE
+\Umathcode "021AF = "3 "0 "021AF
+\Umathcode "021B0 = "3 "0 "021B0
+\Umathcode "021B1 = "3 "0 "021B1
+\Umathcode "021B2 = "3 "0 "021B2
+\Umathcode "021B3 = "3 "0 "021B3
+\Umathcode "021B4 = "0 "0 "021B4
+\Umathcode "021B5 = "0 "0 "021B5
+\Umathcode "021B6 = "3 "0 "021B6
+\Umathcode "021B7 = "3 "0 "021B7
+\Umathcode "021B8 = "3 "0 "021B8
+\Umathcode "021B9 = "3 "0 "021B9
+\Umathcode "021BA = "3 "0 "021BA
+\Umathcode "021BB = "3 "0 "021BB
+\Umathcode "021BC = "3 "0 "021BC
+\Umathcode "021BD = "3 "0 "021BD
+\Umathcode "021BE = "3 "0 "021BE
+\Umathcode "021BF = "3 "0 "021BF
+\Umathcode "021C0 = "3 "0 "021C0
+\Umathcode "021C1 = "3 "0 "021C1
+\Umathcode "021C2 = "3 "0 "021C2
+\Umathcode "021C3 = "3 "0 "021C3
+\Umathcode "021C4 = "3 "0 "021C4
+\Umathcode "021C5 = "3 "0 "021C5
+\Umathcode "021C6 = "3 "0 "021C6
+\Umathcode "021C7 = "3 "0 "021C7
+\Umathcode "021C8 = "3 "0 "021C8
+\Umathcode "021C9 = "3 "0 "021C9
+\Umathcode "021CA = "3 "0 "021CA
+\Umathcode "021CB = "3 "0 "021CB
+\Umathcode "021CC = "3 "0 "021CC
+\Umathcode "021CD = "3 "0 "021CD
+\Umathcode "021CE = "3 "0 "021CE
+\Umathcode "021CF = "3 "0 "021CF
+\Umathcode "021D0 = "3 "0 "021D0
+\Umathcode "021D1 = "3 "0 "021D1
+\Umathcode "021D2 = "3 "0 "021D2
+\Umathcode "021D3 = "3 "0 "021D3
+\Umathcode "021D4 = "3 "0 "021D4
+\Umathcode "021D5 = "3 "0 "021D5
+\Umathcode "021D6 = "3 "0 "021D6
+\Umathcode "021D7 = "3 "0 "021D7
+\Umathcode "021D8 = "3 "0 "021D8
+\Umathcode "021D9 = "3 "0 "021D9
+\Umathcode "021DA = "3 "0 "021DA
+\Umathcode "021DB = "3 "0 "021DB
+\Umathcode "021DC = "3 "0 "021DC
+\Umathcode "021DD = "3 "0 "021DD
+\Umathcode "021DE = "3 "0 "021DE
+\Umathcode "021DF = "3 "0 "021DF
+\Umathcode "021E0 = "3 "0 "021E0
+\Umathcode "021E1 = "3 "0 "021E1
+\Umathcode "021E2 = "3 "0 "021E2
+\Umathcode "021E3 = "3 "0 "021E3
+\Umathcode "021E4 = "3 "0 "021E4
+\Umathcode "021E5 = "3 "0 "021E5
+\Umathcode "021E6 = "0 "0 "021E6
+\Umathcode "021E7 = "0 "0 "021E7
+\Umathcode "021E8 = "0 "0 "021E8
+\Umathcode "021E9 = "0 "0 "021E9
+\Umathcode "021EB = "0 "0 "021EB
+\Umathcode "021F4 = "3 "0 "021F4
+\Umathcode "021F5 = "3 "0 "021F5
+\Umathcode "021F6 = "3 "0 "021F6
+\Umathcode "021F7 = "3 "0 "021F7
+\Umathcode "021F8 = "3 "0 "021F8
+\Umathcode "021F9 = "3 "0 "021F9
+\Umathcode "021FA = "3 "0 "021FA
+\Umathcode "021FB = "3 "0 "021FB
+\Umathcode "021FC = "3 "0 "021FC
+\Umathcode "021FD = "3 "0 "021FD
+\Umathcode "021FE = "3 "0 "021FE
+\Umathcode "021FF = "3 "0 "021FF
+\Umathcode "02200 = "0 "0 "02200
+\Umathcode "02201 = "0 "0 "02201
+\Umathcode "02202 = "0 "0 "02202
+\Umathcode "02203 = "0 "0 "02203
+\Umathcode "02204 = "0 "0 "02204
+\Umathcode "02205 = "0 "0 "02205
+\Umathcode "02208 = "3 "0 "02208
+\Umathcode "02209 = "3 "0 "02209
+\Umathcode "0220B = "3 "0 "0220B
+\Umathcode "0220C = "3 "0 "0220C
+\Umathcode "0220F = "1 "0 "0220F
+\Umathcode "02210 = "1 "0 "02210
+\Umathcode "02211 = "1 "0 "02211
+\Umathcode "02212 = "2 "0 "02212
+\Umathcode "02213 = "2 "0 "02213
+\Umathcode "02214 = "2 "0 "02214
+\Umathcode "02216 = "2 "0 "02216
+\Umathcode "02217 = "2 "0 "02217
+\Umathcode "02218 = "2 "0 "02218
+\Umathcode "02219 = "2 "0 "02219
+\Umathcode "0221D = "3 "0 "0221D
+\Umathcode "0221E = "0 "0 "0221E
+\Umathcode "0221F = "0 "0 "0221F
+\Umathcode "02220 = "0 "0 "02220
+\Umathcode "02221 = "0 "0 "02221
+\Umathcode "02222 = "0 "0 "02222
+\Umathcode "02223 = "2 "0 "02223
+\Umathcode "02224 = "2 "0 "02224
+\Umathcode "02225 = "3 "0 "02225
+\Umathcode "02226 = "3 "0 "02226
+\Umathcode "02227 = "2 "0 "02227
+\Umathcode "02228 = "2 "0 "02228
+\Umathcode "02229 = "2 "0 "02229
+\Umathcode "0222A = "2 "0 "0222A
+\Umathcode "0222B = "1 "0 "0222B
+\Umathcode "0222C = "1 "0 "0222C
+\Umathcode "0222D = "1 "0 "0222D
+\Umathcode "0222E = "1 "0 "0222E
+\Umathcode "0222F = "1 "0 "0222F
+\Umathcode "02230 = "1 "0 "02230
+\Umathcode "02231 = "1 "0 "02231
+\Umathcode "02232 = "1 "0 "02232
+\Umathcode "02233 = "1 "0 "02233
+\Umathcode "02234 = "3 "0 "02234
+\Umathcode "02235 = "3 "0 "02235
+\Umathcode "02236 = "6 "0 "02236
+\Umathcode "02237 = "3 "0 "02237
+\Umathcode "02238 = "2 "0 "02238
+\Umathcode "02239 = "3 "0 "02239
+\Umathcode "0223C = "3 "0 "0223C
+\Umathcode "0223D = "3 "0 "0223D
+\Umathcode "02240 = "2 "0 "02240
+\Umathcode "02241 = "3 "0 "02241
+\Umathcode "02242 = "3 "0 "02242
+\Umathcode "02243 = "3 "0 "02243
+\Umathcode "02244 = "3 "0 "02244
+\Umathcode "02245 = "3 "0 "02245
+\Umathcode "02246 = "3 "0 "02246
+\Umathcode "02247 = "3 "0 "02247
+\Umathcode "02248 = "3 "0 "02248
+\Umathcode "02249 = "3 "0 "02249
+\Umathcode "0224A = "3 "0 "0224A
+\Umathcode "0224C = "3 "0 "0224C
+\Umathcode "0224D = "3 "0 "0224D
+\Umathcode "0224E = "3 "0 "0224E
+\Umathcode "02250 = "3 "0 "02250
+\Umathcode "02251 = "3 "0 "02251
+\Umathcode "02252 = "3 "0 "02252
+\Umathcode "02253 = "3 "0 "02253
+\Umathcode "02254 = "3 "0 "02254
+\Umathcode "02255 = "3 "0 "02255
+\Umathcode "02256 = "3 "0 "02256
+\Umathcode "02257 = "3 "0 "02257
+\Umathcode "02259 = "3 "0 "02259
+\Umathcode "0225A = "3 "0 "0225A
+\Umathcode "0225B = "3 "0 "0225B
+\Umathcode "0225C = "3 "0 "0225C
+\Umathcode "0225D = "3 "0 "0225D
+\Umathcode "0225E = "3 "0 "0225E
+\Umathcode "0225F = "3 "0 "0225F
+\Umathcode "02260 = "3 "0 "02260
+\Umathcode "02261 = "3 "0 "02261
+\Umathcode "02262 = "3 "0 "02262
+\Umathcode "02263 = "3 "0 "02263
+\Umathcode "02264 = "3 "0 "02264
+\Umathcode "02265 = "3 "0 "02265
+\Umathcode "02266 = "3 "0 "02266
+\Umathcode "02267 = "3 "0 "02267
+\Umathcode "02268 = "3 "0 "02268
+\Umathcode "02269 = "3 "0 "02269
+\Umathcode "0226A = "3 "0 "0226A
+\Umathcode "0226B = "3 "0 "0226B
+\Umathcode "0226C = "3 "0 "0226C
+\Umathcode "0226D = "3 "0 "0226D
+\Umathcode "0226E = "3 "0 "0226E
+\Umathcode "0226F = "3 "0 "0226F
+\Umathcode "02270 = "3 "0 "02270
+\Umathcode "02271 = "3 "0 "02271
+\Umathcode "02272 = "3 "0 "02272
+\Umathcode "02273 = "3 "0 "02273
+\Umathcode "02274 = "3 "0 "02274
+\Umathcode "02275 = "3 "0 "02275
+\Umathcode "02276 = "3 "0 "02276
+\Umathcode "02277 = "3 "0 "02277
+\Umathcode "02278 = "3 "0 "02278
+\Umathcode "02279 = "3 "0 "02279
+\Umathcode "0227A = "3 "0 "0227A
+\Umathcode "0227B = "3 "0 "0227B
+\Umathcode "0227C = "3 "0 "0227C
+\Umathcode "0227D = "3 "0 "0227D
+\Umathcode "0227E = "3 "0 "0227E
+\Umathcode "0227F = "3 "0 "0227F
+\Umathcode "02280 = "3 "0 "02280
+\Umathcode "02281 = "3 "0 "02281
+\Umathcode "02282 = "3 "0 "02282
+\Umathcode "02283 = "3 "0 "02283
+\Umathcode "02284 = "3 "0 "02284
+\Umathcode "02285 = "3 "0 "02285
+\Umathcode "02286 = "3 "0 "02286
+\Umathcode "02287 = "3 "0 "02287
+\Umathcode "02288 = "3 "0 "02288
+\Umathcode "02289 = "3 "0 "02289
+\Umathcode "0228A = "3 "0 "0228A
+\Umathcode "0228B = "3 "0 "0228B
+\Umathcode "0228E = "2 "0 "0228E
+\Umathcode "0228F = "3 "0 "0228F
+\Umathcode "02290 = "3 "0 "02290
+\Umathcode "02291 = "2 "0 "02291
+\Umathcode "02292 = "2 "0 "02292
+\Umathcode "02293 = "2 "0 "02293
+\Umathcode "02294 = "2 "0 "02294
+\Umathcode "02295 = "2 "0 "02295
+\Umathcode "02296 = "2 "0 "02296
+\Umathcode "02297 = "2 "0 "02297
+\Umathcode "02298 = "2 "0 "02298
+\Umathcode "02299 = "2 "0 "02299
+\Umathcode "0229A = "2 "0 "0229A
+\Umathcode "0229B = "2 "0 "0229B
+\Umathcode "0229C = "2 "0 "0229C
+\Umathcode "0229D = "2 "0 "0229D
+\Umathcode "0229E = "2 "0 "0229E
+\Umathcode "0229F = "2 "0 "0229F
+\Umathcode "022A0 = "2 "0 "022A0
+\Umathcode "022A1 = "2 "0 "022A1
+\Umathcode "022A2 = "3 "0 "022A2
+\Umathcode "022A3 = "3 "0 "022A3
+\Umathcode "022A4 = "0 "0 "022A4
+\Umathcode "022A5 = "0 "0 "022A5
+\Umathcode "022A7 = "3 "0 "022A7
+\Umathcode "022A8 = "3 "0 "022A8
+\Umathcode "022A9 = "3 "0 "022A9
+\Umathcode "022AA = "3 "0 "022AA
+\Umathcode "022AB = "3 "0 "022AB
+\Umathcode "022AC = "3 "0 "022AC
+\Umathcode "022AD = "3 "0 "022AD
+\Umathcode "022AE = "3 "0 "022AE
+\Umathcode "022AF = "3 "0 "022AF
+\Umathcode "022B2 = "2 "0 "022B2
+\Umathcode "022B3 = "2 "0 "022B3
+\Umathcode "022B8 = "3 "0 "022B8
+\Umathcode "022BA = "2 "0 "022BA
+\Umathcode "022BB = "2 "0 "022BB
+\Umathcode "022BC = "2 "0 "022BC
+\Umathcode "022C0 = "1 "0 "022C0
+\Umathcode "022C1 = "1 "0 "022C1
+\Umathcode "022C2 = "1 "0 "022C2
+\Umathcode "022C3 = "1 "0 "022C3
+\Umathcode "022C4 = "2 "0 "022C4
+\Umathcode "022C5 = "2 "0 "022C5
+\Umathcode "022C6 = "2 "0 "022C6
+\Umathcode "022C7 = "2 "0 "022C7
+\Umathcode "022C8 = "3 "0 "022C8
+\Umathcode "022C9 = "2 "0 "022C9
+\Umathcode "022CA = "2 "0 "022CA
+\Umathcode "022CB = "2 "0 "022CB
+\Umathcode "022CC = "2 "0 "022CC
+\Umathcode "022CE = "2 "0 "022CE
+\Umathcode "022CF = "2 "0 "022CF
+\Umathcode "022D0 = "3 "0 "022D0
+\Umathcode "022D1 = "3 "0 "022D1
+\Umathcode "022D2 = "2 "0 "022D2
+\Umathcode "022D3 = "2 "0 "022D3
+\Umathcode "022D4 = "3 "0 "022D4
+\Umathcode "022D6 = "2 "0 "022D6
+\Umathcode "022D7 = "2 "0 "022D7
+\Umathcode "022D8 = "3 "0 "022D8
+\Umathcode "022D9 = "3 "0 "022D9
+\Umathcode "022DA = "3 "0 "022DA
+\Umathcode "022DB = "3 "0 "022DB
+\Umathcode "022DC = "3 "0 "022DC
+\Umathcode "022DD = "3 "0 "022DD
+\Umathcode "022DE = "3 "0 "022DE
+\Umathcode "022DF = "3 "0 "022DF
+\Umathcode "022E0 = "3 "0 "022E0
+\Umathcode "022E1 = "3 "0 "022E1
+\Umathcode "022E2 = "3 "0 "022E2
+\Umathcode "022E3 = "3 "0 "022E3
+\Umathcode "022E4 = "3 "0 "022E4
+\Umathcode "022E5 = "3 "0 "022E5
+\Umathcode "022E6 = "3 "0 "022E6
+\Umathcode "022E7 = "3 "0 "022E7
+\Umathcode "022E8 = "3 "0 "022E8
+\Umathcode "022E9 = "3 "0 "022E9
+\Umathcode "022EA = "3 "0 "022EA
+\Umathcode "022EB = "3 "0 "022EB
+\Umathcode "022EC = "3 "0 "022EC
+\Umathcode "022ED = "3 "0 "022ED
+\Umathcode "022EE = "0 "0 "022EE
+\Umathcode "022EF = "0 "0 "022EF
+\Umathcode "022F0 = "0 "0 "022F0
+\Umathcode "022F1 = "0 "0 "022F1
+\Umathcode "02300 = "0 "0 "02300
+\Umathcode "02308 = "4 "0 "02308
+\Umathcode "02309 = "5 "0 "02309
+\Umathcode "0230A = "4 "0 "0230A
+\Umathcode "0230B = "5 "0 "0230B
+\Umathcode "0231C = "4 "0 "0231C
+\Umathcode "0231D = "5 "0 "0231D
+\Umathcode "0231E = "4 "0 "0231E
+\Umathcode "0231F = "5 "0 "0231F
+\Umathcode "02322 = "3 "0 "02322
+\Umathcode "02323 = "3 "0 "02323
+\Umathcode "023B0 = "4 "0 "023B0
+\Umathcode "023B1 = "5 "0 "023B1
+\Umathcode "024C7 = "0 "0 "024C7
+\Umathcode "024C8 = "0 "0 "024C8
+\Umathcode "025A0 = "0 "0 "025A0
+\Umathcode "025A1 = "0 "0 "025A1
+\Umathcode "025A2 = "0 "0 "025A2
+\Umathcode "025B2 = "2 "0 "025B2
+\Umathcode "025B3 = "0 "0 "025B3
+\Umathcode "025B6 = "2 "0 "025B6
+\Umathcode "025B7 = "2 "0 "025B7
+\Umathcode "025BC = "2 "0 "025BC
+\Umathcode "025BD = "2 "0 "025BD
+\Umathcode "025C0 = "2 "0 "025C0
+\Umathcode "025C1 = "2 "0 "025C1
+\Umathcode "025CA = "0 "0 "025CA
+\Umathcode "025EF = "2 "0 "025EF
+\Umathcode "02605 = "0 "0 "02605
+\Umathcode "02660 = "0 "0 "02660
+\Umathcode "02661 = "0 "0 "02661
+\Umathcode "02662 = "0 "0 "02662
+\Umathcode "02663 = "0 "0 "02663
+\Umathcode "02666 = "0 "0 "02666
+\Umathcode "0266D = "0 "0 "0266D
+\Umathcode "0266E = "0 "0 "0266E
+\Umathcode "0266F = "0 "0 "0266F
+\Umathcode "02713 = "0 "0 "02713
+\Umathcode "02720 = "0 "0 "02720
+\Umathcode "027E6 = "4 "0 "027E6
+\Umathcode "027E7 = "5 "0 "027E7
+\Umathcode "027E8 = "4 "0 "027E8
+\Umathcode "027E9 = "5 "0 "027E9
+\Umathcode "027EA = "4 "0 "027EA
+\Umathcode "027EB = "5 "0 "027EB
+\Umathcode "027EE = "4 "0 "027EE
+\Umathcode "027EF = "5 "0 "027EF
+\Umathcode "027F5 = "3 "0 "027F5
+\Umathcode "027F6 = "3 "0 "027F6
+\Umathcode "027F7 = "3 "0 "027F7
+\Umathcode "027F8 = "3 "0 "027F8
+\Umathcode "027F9 = "3 "0 "027F9
+\Umathcode "027FA = "3 "0 "027FA
+\Umathcode "027FB = "3 "0 "027FB
+\Umathcode "027FC = "3 "0 "027FC
+\Umathcode "027FD = "3 "0 "027FD
+\Umathcode "027FE = "3 "0 "027FE
+\Umathcode "027FF = "3 "0 "027FF
+\Umathcode "02906 = "3 "0 "02906
+\Umathcode "02907 = "3 "0 "02907
+\Umathcode "0290A = "3 "0 "0290A
+\Umathcode "0290B = "3 "0 "0290B
+\Umathcode "0290C = "3 "0 "0290C
+\Umathcode "0290D = "3 "0 "0290D
+\Umathcode "02911 = "3 "0 "02911
+\Umathcode "02916 = "3 "0 "02916
+\Umathcode "02917 = "3 "0 "02917
+\Umathcode "02921 = "3 "0 "02921
+\Umathcode "02922 = "3 "0 "02922
+\Umathcode "02923 = "3 "0 "02923
+\Umathcode "02924 = "3 "0 "02924
+\Umathcode "02925 = "3 "0 "02925
+\Umathcode "02926 = "3 "0 "02926
+\Umathcode "02A00 = "1 "0 "02A00
+\Umathcode "02A01 = "1 "0 "02A01
+\Umathcode "02A02 = "1 "0 "02A02
+\Umathcode "02A03 = "1 "0 "02A03
+\Umathcode "02A04 = "1 "0 "02A04
+\Umathcode "02A05 = "1 "0 "02A05
+\Umathcode "02A06 = "1 "0 "02A06
+\Umathcode "02A09 = "1 "0 "02A09
+\Umathcode "02A3F = "2 "0 "02A3F
+\Umathcode "02A7D = "3 "0 "02A7D
+\Umathcode "02A7E = "3 "0 "02A7E
+\Umathcode "02A85 = "3 "0 "02A85
+\Umathcode "02A86 = "3 "0 "02A86
+\Umathcode "02A87 = "3 "0 "02A87
+\Umathcode "02A88 = "3 "0 "02A88
+\Umathcode "02A89 = "3 "0 "02A89
+\Umathcode "02A8A = "3 "0 "02A8A
+\Umathcode "02A8B = "3 "0 "02A8B
+\Umathcode "02A8C = "3 "0 "02A8C
+\Umathcode "02A95 = "3 "0 "02A95
+\Umathcode "02A96 = "3 "0 "02A96
+\Umathcode "02AAF = "3 "0 "02AAF
+\Umathcode "02AB0 = "3 "0 "02AB0
+\Umathcode "02AB1 = "3 "0 "02AB1
+\Umathcode "02AB2 = "3 "0 "02AB2
+\Umathcode "02AB3 = "3 "0 "02AB3
+\Umathcode "02AB4 = "3 "0 "02AB4
+\Umathcode "02AB5 = "3 "0 "02AB5
+\Umathcode "02AB6 = "3 "0 "02AB6
+\Umathcode "02AB7 = "3 "0 "02AB7
+\Umathcode "02AB8 = "3 "0 "02AB8
+\Umathcode "02AB9 = "3 "0 "02AB9
+\Umathcode "02ABA = "3 "0 "02ABA
+\Umathcode "02AC5 = "3 "0 "02AC5
+\Umathcode "02AC6 = "3 "0 "02AC6
+\Umathcode "02ACB = "3 "0 "02ACB
+\Umathcode "02ACC = "3 "0 "02ACC
+\Umathcode "12035 = "0 "0 "12035
+\Umathcode "1D6A4 = "0 "0 "1D6A4
+\Umathcode "1D6A5 = "0 "0 "1D6A5
+\Umathcode "1D6FB = "0 "0 "1D6FB
+\Umathcode "1D717 = "0 "0 "1D717
+\Umathcode "1D718 = "0 "0 "1D718
+
+% gaps .. done in lua (as example)
+
+% \Umathcode "1D455 = "0 "0 "0210E
+% \Umathcode "1D49D = "0 "0 "0212C
+% \Umathcode "1D4A0 = "0 "0 "02130
+% \Umathcode "1D4A1 = "0 "0 "02131
+% \Umathcode "1D4A3 = "0 "0 "0210B
+% \Umathcode "1D4A4 = "0 "0 "02110
+% \Umathcode "1D4A7 = "0 "0 "02112
+% \Umathcode "1D4A8 = "0 "0 "02133
+% \Umathcode "1D4AD = "0 "0 "0211B
+% \Umathcode "1D4BA = "0 "0 "0212F
+% \Umathcode "1D4BC = "0 "0 "0210A
+% \Umathcode "1D4C4 = "0 "0 "02134
+% \Umathcode "1D506 = "0 "0 "0212D
+% \Umathcode "1D50B = "0 "0 "0210C
+% \Umathcode "1D50C = "0 "0 "02111
+% \Umathcode "1D515 = "0 "0 "0211C
+% \Umathcode "1D51D = "0 "0 "02128
+% \Umathcode "1D53A = "0 "0 "02102
+% \Umathcode "1D53F = "0 "0 "0210D
+% \Umathcode "1D545 = "0 "0 "02115
+% \Umathcode "1D547 = "0 "0 "02119
+% \Umathcode "1D548 = "0 "0 "0211A
+% \Umathcode "1D549 = "0 "0 "0211D
+% \Umathcode "1D551 = "0 "0 "02124
+
+% initialization
+
+\the\everymathit
+
+% a couple of definitions (we could also use \mathchardef):
+
+\def\acute {\Umathaccent"0"0"0000B4 }
+\def\acwopencirclearrow {\Umathchar "3"0"0021BA }
+\def\aleph {\Umathchar "0"0"002135 }
+\def\Alpha {\Umathchar "0"0"000391 }
+\def\alpha {\Umathchar "0"0"0003B1 }
+\def\amalg {\Umathchar "2"0"002A3F }
+\def\angle {\Umathchar "0"0"002220 }
+\def\Angstrom {\Umathchar "0"0"00212B }
+\def\approx {\Umathchar "3"0"002248 }
+\def\approxEq {\Umathchar "3"0"002245 }
+\def\approxeq {\Umathchar "3"0"00224A }
+\def\approxnEq {\Umathchar "3"0"002247 }
+\def\arrowvert {\Umathchar "0"0"00007C }
+\def\Arrowvert {\Umathchar "0"0"002016 }
+\def\ast {\Umathchar "2"0"002217 }
+\def\ast {\Umathchar "2"0"002217 }
+\def\asymp {\Umathchar "3"0"00224D }
+\def\backepsilon {\Umathchar "0"0"0003F6 }
+\def\backprime {\Umathchar "0"0"012035 }
+\def\backsim {\Umathchar "3"0"00223D }
+\def\backslash {\Umathchar "0"0"00005C }
+\def\bar {\Umathaccent"0"0"0000AF }
+\def\barleftarrow {\Umathchar "3"0"0021E4 }
+\def\barleftarrowrightarrowbar {\Umathchar "3"0"0021B9 }
+\def\barovernorthwestarrow {\Umathchar "3"0"0021B8 }
+\def\barwedge {\Umathchar "2"0"0022BC }
+\def\because {\Umathchar "3"0"002235 }
+\def\Beta {\Umathchar "0"0"000392 }
+\def\beta {\Umathchar "0"0"0003B2 }
+\def\beth {\Umathchar "0"0"002136 }
+\def\between {\Umathchar "3"0"00226C }
+\def\bigcap {\Umathchar "1"0"0022C2 }
+\def\bigcirc {\Umathchar "2"0"0025EF }
+\def\bigcircle {\Umathchar "2"0"0020DD }
+\def\bigcircle {\Umathchar "2"0"0020DD }
+\def\bigcup {\Umathchar "1"0"0022C3 }
+\def\bigdiamond {\Umathchar "0"0"0020DF }
+\def\bigodot {\Umathchar "1"0"002A00 }
+\def\bigoplus {\Umathchar "1"0"002A01 }
+\def\bigotimes {\Umathchar "1"0"002A02 }
+\def\bigsqcap {\Umathchar "1"0"002A05 }
+\def\bigsqcup {\Umathchar "1"0"002A06 }
+\def\bigsquare {\Umathchar "0"0"0020DE }
+\def\bigstar {\Umathchar "0"0"002605 }
+\def\bigtimes {\Umathchar "1"0"002A09 }
+\def\bigtriangledown {\Umathchar "2"0"0025BD }
+\def\bigtriangleup {\Umathchar "2"0"0025B3 }
+\def\bigudot {\Umathchar "1"0"002A03 }
+\def\biguplus {\Umathchar "1"0"002A04 }
+\def\bigvee {\Umathchar "1"0"0022C1 }
+\def\bigwedge {\Umathchar "1"0"0022C0 }
+\def\blacklozenge {\Umathchar "0"0"002666 }
+\def\blacksquare {\Umathchar "0"0"0025A0 }
+\def\blacktriangle {\Umathchar "2"0"0025B2 }
+\def\blacktriangledown {\Umathchar "2"0"0025BC }
+\def\blacktriangleleft {\Umathchar "2"0"0025C0 }
+\def\blacktriangleright {\Umathchar "2"0"0025B6 }
+\def\bot {\Umathchar "0"0"0022A5 }
+\def\bowtie {\Umathchar "3"0"0022C8 }
+\def\Box {\Umathchar "0"0"0025A1 }
+\def\boxdot {\Umathchar "2"0"0022A1 }
+\def\boxminus {\Umathchar "2"0"00229F }
+\def\boxplus {\Umathchar "2"0"00229E }
+\def\boxtimes {\Umathchar "2"0"0022A0 }
+%def\braceld {\Umathchar "0"0"000000 }
+%def\bracerd {\Umathchar "0"0"000000 }
+%def\bracelu {\Umathchar "0"0"000000 }
+%def\braceru {\Umathchar "0"0"000000 }
+\def\breve {\Umathaccent"0"0"0002D8 }
+\def\bullet {\Umathchar "2"0"002022 }
+\def\Bumpeq {\Umathchar "3"0"00224E }
+\def\cap {\Umathchar "2"0"002229 }
+\def\Cap {\Umathchar "2"0"0022D2 }
+\def\carriagereturn {\Umathchar "0"0"0021B5 }
+\def\cdot {\Umathchar "2"0"0022C5 }
+\def\cdotp {\Umathchar "6"0"0022C5 }
+\def\cdots {\Umathchar "0"0"0022EF }
+\def\centerdot {\Umathchar "2"0"0000B7 }
+\def\check {\Umathaccent"0"0"0002C7 }
+\def\checkmark {\Umathchar "0"0"002713 }
+\def\Chi {\Umathchar "0"0"0003A7 }
+\def\chi {\Umathchar "0"0"0003C7 }
+\def\circ {\Umathchar "2"0"002218 }
+\def\circeq {\Umathchar "3"0"002257 }
+\def\circlearrowleft {\Umathchar "3"0"0021BB }
+\def\circlearrowright {\Umathchar "3"0"0021BA }
+\def\circledast {\Umathchar "2"0"00229B }
+\def\circledcirc {\Umathchar "2"0"00229A }
+\def\circleddash {\Umathchar "2"0"00229D }
+\def\circledequals {\Umathchar "2"0"00229C }
+\def\circledR {\Umathchar "0"0"0024C7 }
+\def\circledS {\Umathchar "0"0"0024C8 }
+\def\circleonrightarrow {\Umathchar "3"0"0021F4 }
+\def\clubsuit {\Umathchar "0"0"002663 }
+\def\colon {\Umathchar "6"0"002236 }
+\def\colonequals {\Umathchar "3"0"002254 }
+\def\complement {\Umathchar "0"0"002201 }
+\def\complexes {\Umathchar "0"0"002102 }
+\def\cong {\Umathchar "3"0"002245 }
+\def\coprod {\Umathchar "1"0"002210 }
+\def\cup {\Umathchar "2"0"00222A }
+\def\Cup {\Umathchar "2"0"0022D3 }
+\def\curlyeqprec {\Umathchar "3"0"0022DE }
+\def\curlyeqsucc {\Umathchar "3"0"0022DF }
+\def\curlyvee {\Umathchar "2"0"0022CE }
+\def\curlywedge {\Umathchar "2"0"0022CF }
+\def\curvearrowleft {\Umathchar "3"0"0021B6 }
+\def\curvearrowright {\Umathchar "3"0"0021B7 }
+\def\cwopencirclearrow {\Umathchar "3"0"0021BB }
+\def\dag {\Umathchar "0"0"002020 }
+\def\dagger {\Umathchar "2"0"002020 }
+\def\daleth {\Umathchar "0"0"002138 }
+\def\dasharrow {\Umathchar "3"0"0021E2 }
+\def\dashedleftarrow {\Umathchar "3"0"00290C }
+\def\dashedrightarrow {\Umathchar "3"0"00290D }
+\def\dashv {\Umathchar "3"0"0022A3 }
+\def\ddag {\Umathchar "0"0"002021 }
+\def\ddagger {\Umathchar "2"0"002021 }
+\def\dddot {\Umathaccent"0"0"0020DB }
+\def\ddot {\Umathaccent"0"0"0000A8 }
+\def\ddots {\Umathchar "0"0"0022F1 }
+\def\Ddownarrow {\Umathchar "3"0"00290B }
+\def\definedeq {\Umathchar "3"0"00225D }
+\def\Delta {\Umathchar "0"0"000394 }
+\def\delta {\Umathchar "0"0"0003B4 }
+\def\diamond {\Umathchar "2"0"0022C4 }
+\def\diamondsuit {\Umathchar "0"0"002662 }
+\def\differentialD {\Umathchar "0"0"002145 }
+\def\differentiald {\Umathchar "0"0"002146 }
+\def\digamma {\Umathchar "0"0"0003DC }
+\def\div {\Umathchar "2"0"0000F7 }
+\def\divideontimes {\Umathchar "2"0"0022C7 }
+\def\divides {\Umathchar "2"0"002223 }
+\def\dot {\Umathaccent"0"0"0002D9 }
+\def\doteq {\Umathchar "3"0"002250 }
+\def\Doteq {\Umathchar "3"0"002251 }
+\def\doteqdot {\Umathchar "3"0"002251 }
+\def\dotminus {\Umathchar "2"0"002238 }
+\def\dotplus {\Umathchar "2"0"002214 }
+\def\dots {\Umathchar "0"0"002026 }
+\def\dottedrightarrow {\Umathchar "3"0"002911 }
+\def\doublecap {\Umathchar "2"0"0022D2 }
+\def\doublecup {\Umathchar "2"0"0022D3 }
+\def\doubleprime {\Umathchar "0"0"002033 }
+\def\downarrow {\Umathchar "3"0"002193 }
+\def\Downarrow {\Umathchar "3"0"0021D3 }
+\def\downdasharrow {\Umathchar "3"0"0021E3 }
+\def\downdownarrows {\Umathchar "3"0"0021CA }
+\def\downharpoonleft {\Umathchar "3"0"0021C3 }
+\def\downharpoonright {\Umathchar "3"0"0021C2 }
+\def\downuparrows {\Umathchar "3"0"0021F5 }
+\def\downwhitearrow {\Umathchar "0"0"0021E9 }
+\def\downzigzagarrow {\Umathchar "3"0"0021AF }
+\def\ell {\Umathchar "0"0"002113 }
+\def\emptyset {\Umathchar "0"0"002205 }
+\def\Epsilon {\Umathchar "0"0"000395 }
+\def\epsilon {\Umathchar "0"0"0003F5 }
+\def\eq {\Umathchar "3"0"00003D }
+\def\eqcirc {\Umathchar "3"0"002256 }
+\def\eqgtr {\Umathchar "3"0"0022DD }
+\def\eqless {\Umathchar "3"0"0022DC }
+\def\eqsim {\Umathchar "3"0"002242 }
+\def\eqslantgtr {\Umathchar "3"0"002A96 }
+\def\eqslantless {\Umathchar "3"0"002A95 }
+\def\equalscolon {\Umathchar "3"0"002255 }
+\def\equiv {\Umathchar "3"0"002261 }
+\def\Eta {\Umathchar "0"0"000397 }
+\def\eta {\Umathchar "0"0"0003B7 }
+\def\eth {\Umathchar "0"0"0000F0 }
+\def\Eulerconst {\Umathchar "0"0"002107 }
+\def\exists {\Umathchar "0"0"002203 }
+\def\exponentiale {\Umathchar "0"0"002147 }
+\def\fallingdotseq {\Umathchar "3"0"002252 }
+\def\Finv {\Umathchar "0"0"002132 }
+\def\flat {\Umathchar "0"0"00266D }
+\def\forall {\Umathchar "0"0"002200 }
+\def\frown {\Umathchar "3"0"002322 }
+\def\Game {\Umathchar "0"0"002141 }
+\def\Gamma {\Umathchar "0"0"000393 }
+\def\gamma {\Umathchar "0"0"0003B3 }
+\def\ge {\Umathchar "3"0"002265 }
+\def\geq {\Umathchar "3"0"002265 }
+\def\geqq {\Umathchar "3"0"002267 }
+\def\geqslant {\Umathchar "3"0"002A7E }
+\def\gets {\Umathchar "3"0"002190 }
+\def\gg {\Umathchar "3"0"00226B }
+\def\ggg {\Umathchar "3"0"0022D9 }
+\def\gggtr {\Umathchar "3"0"0022D9 }
+\def\gimel {\Umathchar "0"0"002137 }
+\def\gnapprox {\Umathchar "3"0"002A8A }
+\def\gneqq {\Umathchar "3"0"002269 }
+\def\gnsim {\Umathchar "3"0"0022E7 }
+\def\grave {\Umathaccent"0"0"000060 }
+\def\gt {\Umathchar "3"0"00003E }
+\def\gtrapprox {\Umathchar "3"0"002A86 }
+\def\gtrdot {\Umathchar "2"0"0022D7 }
+\def\gtreqless {\Umathchar "3"0"0022DB }
+\def\gtreqqless {\Umathchar "3"0"002A8C }
+\def\gtrless {\Umathchar "3"0"002277 }
+\def\gtrsim {\Umathchar "3"0"002273 }
+\def\hat {\Umathaccent"0"0"0002C6 }
+\def\hbar {\Umathchar "0"0"00210F }
+\def\heartsuit {\Umathchar "0"0"002661 }
+\def\hookleftarrow {\Umathchar "3"0"0021A9 }
+\def\hookrightarrow {\Umathchar "3"0"0021AA }
+\def\hslash {\Umathchar "0"0"00210F }
+\def\iiint {\Umathchar "1"0"00222D }
+\def\iiintop {\Umathchar "0"0"00222D }
+\def\iint {\Umathchar "1"0"00222C }
+\def\iintop {\Umathchar "0"0"00222C }
+\def\Im {\Umathchar "0"0"002111 }
+\def\imaginaryi {\Umathchar "0"0"002148 }
+\def\imaginaryj {\Umathchar "0"0"002149 }
+\def\imath {\Umathchar "0"0"01D6A4 }
+\def\imply {\Umathchar "3"0"0021D2 }
+\def\in {\Umathchar "0"0"002208 }
+\def\infty {\Umathchar "0"0"00221E }
+\def\int {\Umathchar "1"0"00222B }
+\def\intclockwise {\Umathchar "1"0"002231 }
+\def\integers {\Umathchar "0"0"002124 }
+\def\intercal {\Umathchar "2"0"0022BA }
+\def\intop {\Umathchar "0"0"00222B }
+\def\Iota {\Umathchar "0"0"000399 }
+\def\iota {\Umathchar "0"0"0003B9 }
+\def\jmath {\Umathchar "0"0"01D6A5 }
+\def\Join {\Umathchar "3"0"0022C8 }
+\def\Kappa {\Umathchar "0"0"00039A }
+\def\kappa {\Umathchar "0"0"0003BA }
+\def\Lambda {\Umathchar "0"0"00039B }
+\def\lambda {\Umathchar "0"0"0003BB }
+\def\land {\Umathchar "2"0"002227 }
+\def\langle {\Udelimiter "4"0"0027E8 }
+\def\lbrace {\Udelimiter "4"0"00007B }
+\def\lbrack {\Udelimiter "4"0"00005B }
+\def\lceil {\Udelimiter "4"0"002308 }
+\def\lceiling {\Udelimiter "4"0"002308 }
+\def\ldotp {\Umathchar "6"0"00002E }
+\def\ldots {\Umathchar "0"0"002026 }
+\def\Ldsh {\Umathchar "3"0"0021B2 }
+\def\le {\Umathchar "3"0"002264 }
+\def\leadsto {\Umathchar "3"0"0021DD }
+\def\leftarrow {\Umathchar "3"0"002190 }
+\def\Leftarrow {\Umathchar "3"0"0021D0 }
+\def\leftarrowtail {\Umathchar "3"0"0021A2 }
+\def\leftarrowtriangle {\Umathchar "3"0"0021FD }
+\def\leftdasharrow {\Umathchar "3"0"0021E0 }
+\def\leftharpoondown {\Umathchar "3"0"0021BD }
+\def\leftharpoonup {\Umathchar "3"0"0021BC }
+\def\leftleftarrows {\Umathchar "3"0"0021C7 }
+\def\leftrightarrow {\Umathchar "3"0"002194 }
+\def\Leftrightarrow {\Umathchar "3"0"0021D4 }
+\def\leftrightarrows {\Umathchar "3"0"0021C6 }
+\def\leftrightarrowtriangle {\Umathchar "3"0"0021FF }
+\def\leftrightharpoons {\Umathchar "3"0"0021CB }
+\def\leftrightsquigarrow {\Umathchar "3"0"0021AD }
+\def\leftsquigarrow {\Umathchar "3"0"0021DC }
+\def\leftthreetimes {\Umathchar "2"0"0022CB }
+\def\leftwavearrow {\Umathchar "3"0"00219C }
+\def\leftwhitearrow {\Umathchar "0"0"0021E6 }
+\def\leq {\Umathchar "3"0"002264 }
+\def\leqq {\Umathchar "3"0"002266 }
+\def\leqslant {\Umathchar "3"0"002A7D }
+\def\lessapprox {\Umathchar "3"0"002A85 }
+\def\lessdot {\Umathchar "2"0"0022D6 }
+\def\lesseqgtr {\Umathchar "3"0"0022DA }
+\def\lesseqqgtr {\Umathchar "3"0"002A8B }
+\def\lessgtr {\Umathchar "3"0"002276 }
+\def\lesssim {\Umathchar "3"0"002272 }
+\def\lfloor {\Udelimiter "4"0"00230A }
+\def\lgroup {\Udelimiter "4"0"0027EE }
+\def\lhook {\Umathchar "3"0"0FE322 }
+\def\lhooknwarrow {\Umathchar "3"0"002923 }
+\def\lhooksearrow {\Umathchar "3"0"002925 }
+\def\linefeed {\Umathchar "0"0"0021B4 }
+\def\ll {\Umathchar "3"0"00226A }
+\def\llangle {\Udelimiter "4"0"0027EA }
+\def\llbracket {\Udelimiter "4"0"0027E6 }
+\def\llcorner {\Udelimiter "4"0"00231E }
+\def\Lleftarrow {\Umathchar "3"0"0021DA }
+\def\lll {\Umathchar "3"0"0022D8 }
+\def\llless {\Umathchar "3"0"0022D8 }
+\def\lmoustache {\Udelimiter "4"0"0023B0 }
+\def\lnapprox {\Umathchar "3"0"002A89 }
+\def\lneq {\Umathchar "3"0"002A87 }
+\def\lneqq {\Umathchar "3"0"002268 }
+\def\lnot {\Umathchar "0"0"0000AC }
+\def\lnsim {\Umathchar "3"0"0022E6 }
+\def\longleftarrow {\Umathchar "3"0"0027F5 }
+\def\Longleftarrow {\Umathchar "3"0"0027F8 }
+\def\longleftrightarrow {\Umathchar "3"0"0027F7 }
+\def\Longleftrightarrow {\Umathchar "3"0"0027FA }
+\def\longmapsfrom {\Umathchar "3"0"0027FB }
+\def\Longmapsfrom {\Umathchar "3"0"0027FD }
+\def\longmapsto {\Umathchar "3"0"0027FC }
+\def\Longmapsto {\Umathchar "3"0"0027FE }
+\def\longrightarrow {\Umathchar "3"0"0027F6 }
+\def\Longrightarrow {\Umathchar "3"0"0027F9 }
+\def\longrightsquigarrow {\Umathchar "3"0"0027FF }
+\def\looparrowleft {\Umathchar "3"0"0021AB }
+\def\looparrowright {\Umathchar "3"0"0021AC }
+\def\lor {\Umathchar "2"0"002228 }
+\def\lozenge {\Umathchar "0"0"0025CA }
+\def\lparent {\Udelimiter "4"0"000028 }
+\def\lrcorner {\Udelimiter "5"0"00231F }
+\def\Lsh {\Umathchar "3"0"0021B0 }
+\def\lt {\Umathchar "3"0"00003C }
+\def\ltimes {\Umathchar "2"0"0022C9 }
+\def\lvert {\Udelimiter "4"0"00007C }
+\def\lVert {\Udelimiter "4"0"002016 }
+\def\maltese {\Umathchar "0"0"002720 }
+\def\mapsdown {\Umathchar "3"0"0021A7 }
+\def\mapsfrom {\Umathchar "3"0"0021A4 }
+\def\Mapsfrom {\Umathchar "3"0"002906 }
+\def\mapsfromchar {\Umathchar "3"0"0FE324 }
+\def\mapsto {\Umathchar "3"0"0021A6 }
+\def\Mapsto {\Umathchar "3"0"002907 }
+\def\mapstochar {\Umathchar "3"0"0FE321 }
+\def\mapsup {\Umathchar "3"0"0021A5 }
+\def\mathring {\Umathaccent"0"0"0002DA }
+\def\measuredangle {\Umathchar "0"0"002221 }
+\def\measuredeq {\Umathchar "3"0"00225E }
+\def\mho {\Umathchar "0"0"002127 }
+\def\mid {\Umathchar "3"0"00007C }
+\def\minus {\Umathchar "2"0"002212 }
+\def\minuscolon {\Umathchar "2"0"002239 }
+\def\models {\Umathchar "3"0"0022A7 }
+\def\mp {\Umathchar "2"0"002213 }
+\def\Mu {\Umathchar "0"0"00039C }
+\def\mu {\Umathchar "0"0"0003BC }
+\def\multimap {\Umathchar "3"0"0022B8 }
+\def\napprox {\Umathchar "3"0"002249 }
+\def\napproxEq {\Umathchar "3"0"002246 }
+\def\nasymp {\Umathchar "3"0"00226D }
+\def\natural {\Umathchar "0"0"00266E }
+\def\naturalnumbers {\Umathchar "0"0"002115 }
+\def\ncong {\Umathchar "3"0"002246 }
+\def\ndivides {\Umathchar "2"0"002224 }
+\def\ne {\Umathchar "3"0"002260 }
+\def\nearrow {\Umathchar "3"0"002197 }
+\def\Nearrow {\Umathchar "3"0"0021D7 }
+\def\neg {\Umathchar "0"0"0000AC }
+\def\negativesign {\Umathchar "2"0"00207B }
+\def\neq {\Umathchar "3"0"002260 }
+\def\nequiv {\Umathchar "3"0"002262 }
+\def\neswarrow {\Umathchar "3"0"002922 }
+\def\nexists {\Umathchar "0"0"002204 }
+\def\ngeq {\Umathchar "3"0"002271 }
+\def\ngtr {\Umathchar "3"0"00226F }
+\def\ngtrless {\Umathchar "3"0"002279 }
+\def\ngtrsim {\Umathchar "3"0"002275 }
+\def\nHdownarrow {\Umathchar "3"0"0021DF }
+\def\nHuparrow {\Umathchar "3"0"0021DE }
+\def\ni {\Umathchar "3"0"00220B }
+\def\nin {\Umathchar "3"0"002209 }
+\def\nleftarrow {\Umathchar "3"0"00219A }
+\def\nLeftarrow {\Umathchar "3"0"0021CD }
+\def\nleftrightarrow {\Umathchar "3"0"0021AE }
+\def\nLeftrightarrow {\Umathchar "3"0"0021CE }
+\def\nleq {\Umathchar "3"0"002270 }
+\def\nless {\Umathchar "3"0"00226E }
+\def\nlessgtr {\Umathchar "3"0"002278 }
+\def\nlesssim {\Umathchar "3"0"002274 }
+\def\nmid {\Umathchar "3"0"002224 }
+\def\nni {\Umathchar "3"0"00220C }
+\def\not {\Umathchar "3"0"000338 }
+\def\notin {\Umathchar "3"0"002209 }
+\def\nowns {\Umathchar "3"0"00220C }
+\def\nparallel {\Umathchar "3"0"002226 }
+\def\nprec {\Umathchar "3"0"002280 }
+\def\npreccurlyeq {\Umathchar "3"0"0022E0 }
+\def\nrightarrow {\Umathchar "3"0"00219B }
+\def\nRightarrow {\Umathchar "3"0"0021CF }
+\def\nsim {\Umathchar "3"0"002241 }
+\def\nsimeq {\Umathchar "3"0"002244 }
+\def\nsqsubseteq {\Umathchar "3"0"0022E2 }
+\def\nsqsupseteq {\Umathchar "3"0"0022E3 }
+\def\nsubset {\Umathchar "3"0"002284 }
+\def\nsubseteq {\Umathchar "3"0"002288 }
+\def\nsucc {\Umathchar "3"0"002281 }
+\def\nsucccurlyeq {\Umathchar "3"0"0022E1 }
+\def\nsupset {\Umathchar "3"0"002285 }
+\def\nsupseteq {\Umathchar "3"0"002289 }
+\def\ntriangleleft {\Umathchar "3"0"0022EB }
+\def\ntrianglelefteq {\Umathchar "3"0"0022EC }
+\def\ntriangleright {\Umathchar "3"0"0022EA }
+\def\ntrianglerighteq {\Umathchar "3"0"0022ED }
+\def\Nu {\Umathchar "0"0"00039D }
+\def\nu {\Umathchar "0"0"0003BD }
+\def\nvdash {\Umathchar "3"0"0022AC }
+\def\nvDash {\Umathchar "3"0"0022AD }
+\def\nVdash {\Umathchar "3"0"0022AE }
+\def\nVDash {\Umathchar "3"0"0022AF }
+\def\nvleftarrow {\Umathchar "3"0"0021F7 }
+\def\nVleftarrow {\Umathchar "3"0"0021FA }
+\def\nvleftrightarrow {\Umathchar "3"0"0021F9 }
+\def\nVleftrightarrow {\Umathchar "3"0"0021FC }
+\def\nvrightarrow {\Umathchar "3"0"0021F8 }
+\def\nVrightarrow {\Umathchar "3"0"0021FB }
+\def\nwarrow {\Umathchar "3"0"002196 }
+\def\Nwarrow {\Umathchar "3"0"0021D6 }
+\def\nwsearrow {\Umathchar "3"0"002921 }
+\def\odot {\Umathchar "2"0"002299 }
+\def\ohm {\Umathchar "0"0"002126 }
+\def\oiiint {\Umathchar "1"0"002230 }
+\def\oiint {\Umathchar "1"0"00222F }
+\def\oint {\Umathchar "1"0"00222E }
+\def\ointclockwise {\Umathchar "1"0"002232 }
+\def\ointctrclockwise {\Umathchar "1"0"002233 }
+\def\Omega {\Umathchar "0"0"0003A9 }
+\def\omega {\Umathchar "0"0"0003C9 }
+\def\Omicron {\Umathchar "0"0"00039F }
+\def\omicron {\Umathchar "0"0"0003BF }
+\def\ominus {\Umathchar "2"0"002296 }
+\def\oplus {\Umathchar "2"0"002295 }
+\def\oslash {\Umathchar "2"0"002298 }
+\def\otimes {\Umathchar "2"0"002297 }
+\def\overbar {\Umathaccent"0"0"00203E }
+\def\overbrace {\Umathaccent"0"0"0023DE }
+\def\overbracket {\Umathaccent"0"0"0023B4 }
+\def\overparent {\Umathaccent"0"0"0023DC }
+\def\owns {\Umathchar "3"0"00220B }
+\def\P {\Umathchar "0"0"0000B6 }
+\def\parallel {\Umathchar "3"0"002225 }
+\def\partial {\Umathchar "0"0"002202 }
+\def\perp {\Umathchar "3"0"0022A5 }
+\def\Phi {\Umathchar "0"0"0003A6 }
+\def\phi {\Umathchar "0"0"0003D5 }
+\def\Pi {\Umathchar "0"0"0003A0 }
+\def\pi {\Umathchar "0"0"0003C0 }
+\def\pitchfork {\Umathchar "3"0"0022D4 }
+\def\Plankconst {\Umathchar "0"0"00210E }
+\def\pm {\Umathchar "2"0"0000B1 }
+\def\positivesign {\Umathchar "2"0"00207A }
+\def\prec {\Umathchar "3"0"00227A }
+\def\precapprox {\Umathchar "3"0"002AB7 }
+\def\preccurlyeq {\Umathchar "3"0"00227C }
+\def\preceq {\Umathchar "3"0"002AAF }
+\def\preceqq {\Umathchar "3"0"002AB3 }
+\def\precnapprox {\Umathchar "3"0"002AB9 }
+\def\precneq {\Umathchar "3"0"002AB1 }
+\def\precneqq {\Umathchar "3"0"002AB5 }
+\def\precnsim {\Umathchar "3"0"0022E8 }
+\def\precsim {\Umathchar "3"0"00227E }
+\def\prime {\Umathchar "0"0"002032 }
+\def\primes {\Umathchar "0"0"002119 }
+\def\prod {\Umathchar "1"0"00220F }
+\def\PropertyLine {\Umathchar "0"0"00214A }
+\def\propto {\Umathchar "3"0"00221D }
+\def\Psi {\Umathchar "0"0"0003A8 }
+\def\psi {\Umathchar "0"0"0003C8 }
+\def\questionedeq {\Umathchar "3"0"00225F }
+\def\rangle {\Udelimiter "5"0"0027E9 }
+\def\rationals {\Umathchar "0"0"00211A }
+\def\rbrace {\Udelimiter "5"0"00007D }
+\def\rbrack {\Udelimiter "5"0"00005D }
+\def\rceil {\Udelimiter "5"0"002309 }
+\def\rceiling {\Udelimiter "5"0"002309 }
+\def\Rdsh {\Umathchar "3"0"0021B3 }
+\def\Re {\Umathchar "0"0"00211C }
+\def\reals {\Umathchar "0"0"00211D }
+\def\Relbar {\Umathchar "3"0"00003D }
+\def\relbar {\Umathchar "3"0"002212 }
+\def\restriction {\Umathchar "3"0"0021BE }
+\def\rfloor {\Udelimiter "5"0"00230B }
+\def\rgroup {\Udelimiter "5"0"0027EF }
+\def\Rho {\Umathchar "0"0"0003A1 }
+\def\rho {\Umathchar "0"0"0003C1 }
+\def\rhook {\Umathchar "3"0"0FE323 }
+\def\rhooknearrow {\Umathchar "3"0"002924 }
+\def\rhookswarrow {\Umathchar "3"0"002926 }
+\def\rightangle {\Umathchar "0"0"00221F }
+\def\rightarrow {\Umathchar "3"0"002192 }
+\def\Rightarrow {\Umathchar "3"0"0021D2 }
+\def\rightarrowbar {\Umathchar "3"0"0021E5 }
+\def\rightarrowtail {\Umathchar "3"0"0021A3 }
+\def\rightarrowtriangle {\Umathchar "3"0"0021FE }
+\def\rightdasharrow {\Umathchar "3"0"0021E2 }
+\def\rightharpoondown {\Umathchar "3"0"0021C1 }
+\def\rightharpoonup {\Umathchar "3"0"0021C0 }
+\def\rightleftarrows {\Umathchar "3"0"0021C4 }
+\def\rightleftharpoons {\Umathchar "3"0"0021CC }
+\def\rightrightarrows {\Umathchar "3"0"0021C9 }
+\def\rightsquigarrow {\Umathchar "3"0"0021DD }
+\def\rightthreearrows {\Umathchar "3"0"0021F6 }
+\def\rightthreetimes {\Umathchar "2"0"0022CC }
+\def\rightwavearrow {\Umathchar "3"0"00219D }
+\def\rightwhitearrow {\Umathchar "0"0"0021E8 }
+\def\risingdotseq {\Umathchar "3"0"002253 }
+\def\rmoustache {\Udelimiter "5"0"0023B1 }
+\def\rneq {\Umathchar "3"0"002A88 }
+\def\rparent {\Udelimiter "5"0"000029 }
+\def\rrangle {\Udelimiter "5"0"0027EB }
+\def\rrbracket {\Udelimiter "5"0"0027E7 }
+\def\Rrightarrow {\Umathchar "3"0"0021DB }
+\def\Rsh {\Umathchar "3"0"0021B1 }
+\def\rtimes {\Umathchar "2"0"0022CA }
+\def\rvert {\Udelimiter "5"0"00007C }
+\def\rVert {\Udelimiter "5"0"002016 }
+\def\S {\Umathchar "0"0"0000A7 }
+\def\searrow {\Umathchar "3"0"002198 }
+\def\Searrow {\Umathchar "3"0"0021D8 }
+\def\setminus {\Umathchar "2"0"002216 }
+\def\sharp {\Umathchar "0"0"00266F }
+\def\Sigma {\Umathchar "0"0"0003A3 }
+\def\sigma {\Umathchar "0"0"0003C3 }
+\def\sim {\Umathchar "3"0"00223C }
+\def\simeq {\Umathchar "3"0"002243 }
+\def\slash {\Umathchar "0"0"002044 }
+\def\smile {\Umathchar "3"0"002323 }
+\def\solidus {\Udelimiter "5"0"002044 }
+\def\spadesuit {\Umathchar "0"0"002660 }
+\def\sphericalangle {\Umathchar "0"0"002222 }
+\def\sqcap {\Umathchar "2"0"002293 }
+\def\sqcup {\Umathchar "2"0"002294 }
+\def\sqsubset {\Umathchar "3"0"00228F }
+\def\sqsubseteq {\Umathchar "2"0"002291 }
+\def\sqsubsetneq {\Umathchar "3"0"0022E4 }
+\def\sqsupset {\Umathchar "3"0"002290 }
+\def\sqsupseteq {\Umathchar "2"0"002292 }
+\def\sqsupsetneq {\Umathchar "3"0"0022E5 }
+\def\square {\Umathchar "0"0"0025A1 }
+\def\squaredots {\Umathchar "3"0"002237 }
+\def\star {\Umathchar "2"0"0022C6 }
+\def\stareq {\Umathchar "3"0"00225B }
+\def\subset {\Umathchar "3"0"002282 }
+\def\Subset {\Umathchar "3"0"0022D0 }
+\def\subseteq {\Umathchar "3"0"002286 }
+\def\subseteqq {\Umathchar "3"0"002AC5 }
+\def\subsetneq {\Umathchar "3"0"00228A }
+\def\subsetneqq {\Umathchar "3"0"002ACB }
+\def\succ {\Umathchar "3"0"00227B }
+\def\succapprox {\Umathchar "3"0"002AB8 }
+\def\succcurlyeq {\Umathchar "3"0"00227D }
+\def\succeq {\Umathchar "3"0"002AB0 }
+\def\succeqq {\Umathchar "3"0"002AB4 }
+\def\succnapprox {\Umathchar "3"0"002ABA }
+\def\succneq {\Umathchar "3"0"002AB2 }
+\def\succneqq {\Umathchar "3"0"002AB6 }
+\def\succnsim {\Umathchar "3"0"0022E9 }
+\def\succsim {\Umathchar "3"0"00227F }
+\def\sum {\Umathchar "1"0"002211 }
+\def\supset {\Umathchar "3"0"002283 }
+\def\Supset {\Umathchar "3"0"0022D1 }
+\def\supseteq {\Umathchar "3"0"002287 }
+\def\supseteqq {\Umathchar "3"0"002AC6 }
+\def\supsetneq {\Umathchar "3"0"00228B }
+\def\supsetneqq {\Umathchar "3"0"002ACC }
+\def\surd {\Umathchar "2"0"00221A }
+\def\swarrow {\Umathchar "3"0"002199 }
+\def\Swarrow {\Umathchar "3"0"0021D9 }
+\def\Tau {\Umathchar "0"0"0003A4 }
+\def\tau {\Umathchar "0"0"0003C4 }
+\def\therefore {\Umathchar "3"0"002234 }
+\def\Theta {\Umathchar "0"0"000398 }
+\def\theta {\Umathchar "0"0"0003B8 }
+\def\tilde {\Umathaccent"0"0"0002DC }
+\def\times {\Umathchar "2"0"0000D7 }
+\def\to {\Umathchar "3"0"002192 }
+\def\top {\Umathchar "0"0"0022A4 }
+\def\triangle {\Umathchar "0"0"0025B3 }
+\def\triangledown {\Umathchar "2"0"0025BD }
+\def\triangleleft {\Umathchar "2"0"0025C1 }
+\def\triangleq {\Umathchar "3"0"00225C }
+\def\triangleright {\Umathchar "2"0"0025B7 }
+\def\tripleprime {\Umathchar "0"0"002034 }
+\def\turnediota {\Umathchar "0"0"002129 }
+\def\twoheaddownarrow {\Umathchar "3"0"0021A1 }
+\def\twoheadleftarrow {\Umathchar "3"0"00219E }
+\def\twoheadrightarrow {\Umathchar "3"0"0021A0 }
+\def\twoheadrightarrowtail {\Umathchar "3"0"002916 }
+\def\twoheaduparrow {\Umathchar "3"0"00219F }
+\def\udots {\Umathchar "0"0"0022F0 }
+\def\ulcorner {\Udelimiter "4"0"00231C }
+\def\underbar {\Umathaccent bottom "0"0"00203E }
+\def\underbrace {\Umathaccent bottom "0"0"0023DF }
+\def\underbracket {\Umathaccent bottom "0"0"0023B5 }
+\def\underparent {\Umathaccent bottom "0"0"0023DD }
+\def\upand {\Umathchar "2"0"00214B }
+\def\uparrow {\Umathchar "3"0"002191 }
+\def\Uparrow {\Umathchar "3"0"0021D1 }
+\def\updasharrow {\Umathchar "3"0"0021E1 }
+\def\updownarrow {\Umathchar "3"0"002195 }
+\def\Updownarrow {\Umathchar "3"0"0021D5 }
+\def\updownarrowbar {\Umathchar "0"0"0021A8 }
+\def\updownarrows {\Umathchar "3"0"0021C5 }
+\def\upharpoonleft {\Umathchar "3"0"0021BF }
+\def\upharpoonright {\Umathchar "3"0"0021BE }
+\def\uplus {\Umathchar "2"0"00228E }
+\def\Upsilon {\Umathchar "0"0"0003A5 }
+\def\upsilon {\Umathchar "0"0"0003C5 }
+\def\upuparrows {\Umathchar "3"0"0021C8 }
+\def\upwhitearrow {\Umathchar "0"0"0021E7 }
+\def\urcorner {\Udelimiter "5"0"00231D }
+\def\Uuparrow {\Umathchar "3"0"00290A }
+\def\varepsilon {\Umathchar "0"0"0003B5 }
+\def\varkappa {\Umathchar "0"0"0003F0 }
+\def\varnothing {\Umathchar "0"0"002300 }
+\def\varphi {\Umathchar "0"0"0003C6 }
+\def\varpi {\Umathchar "0"0"0003D6 }
+\def\varrho {\Umathchar "0"0"01D71A }
+\def\varsigma {\Umathchar "0"0"0003C2 }
+\def\vartheta {\Umathchar "0"0"01D717 }
+\def\varTheta {\Umathchar "0"0"0003D1 }
+\def\vdash {\Umathchar "3"0"0022A2 }
+\def\vDash {\Umathchar "3"0"0022A8 }
+\def\Vdash {\Umathchar "3"0"0022A9 }
+\def\VDash {\Umathchar "3"0"0022AB }
+\def\vdots {\Umathchar "0"0"0022EE }
+\def\vec {\Umathaccent"0"0"0020D7 }
+\def\vee {\Umathchar "2"0"002228 }
+\def\veebar {\Umathchar "2"0"0022BB }
+\def\veeeq {\Umathchar "3"0"00225A }
+\def\vert {\Udelimiter "0"0"00007C }
+\def\Vert {\Udelimiter "0"0"002016 }
+\def\Vvdash {\Umathchar "3"0"0022AA }
+\def\wedge {\Umathchar "2"0"002227 }
+\def\wedgeeq {\Umathchar "3"0"002259 }
+\def\whitearrowupfrombar {\Umathchar "0"0"0021EB }
+\def\widehat {\Umathaccent"0"0"000302 }
+\def\widetilde {\Umathaccent"0"0"000303 }
+\def\wp {\Umathchar "0"0"002118 }
+\def\wr {\Umathchar "2"0"002240 }
+\def\Xi {\Umathchar "0"0"00039E }
+\def\xi {\Umathchar "0"0"0003BE }
+\def\yen {\Umathchar "0"0"0000A5 }
+\def\Zeta {\Umathchar "0"0"000396 }
+\def\zeta {\Umathchar "0"0"0003B6 }
+
+% a few definitions:
+
+\def\sqrt {\Uroot "0 "221A{}}
+\def\root#1\of{\Uroot "0 "221A{#1}}
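+%
+% For example (usage sketch, not part of the original file):
+%
+%   $\sqrt{2}$  and  $\root 3 \of {x+1}$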
+
+% \skewchar\teni='177 \skewchar\seveni='177 \skewchar\fivei='177
+% \skewchar\tensy='60 \skewchar\sevensy='60 \skewchar\fivesy='60
+
+\chardef\% = "25
+\chardef\& = "26
+\chardef\# = "23
+\chardef\$ = "24
+\chardef\_ = "5F
+
+\let\ss ß
+\let\ae æ
+\let\oe œ
+\let\o ø
+\let\AE Æ
+\let\OE Œ
+\let\O Ø
+\let\i ı
+\let\aa å
+\let\l ł
+\let\L Ł
+\let\AA Å
+\let\copyright ©
+
+% just use utf
+
+\def\`#1{\string\`\string{#1\string}}
+\def\'#1{\string\'\string{#1\string}}
+\def\v#1{\string\v\string{#1\string}}
+\def\u#1{\string\u\string{#1\string}}
+\def\=#1{\string\=\string{#1\string}}
+\def\^#1{\string\^\string{#1\string}}
+\def\.#1{\string\.\string{#1\string}}
+\def\H#1{\string\H\string{#1\string}}
+\def\~#1{\string\~\string{#1\string}}
+\def\"#1{\string\"\string{#1\string}}
+\def\d#1{\string\d\string{#1\string}}
+\def\b#1{\string\b\string{#1\string}}
+\def\c#1{\string\c\string{#1\string}}
+
+\endinput
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.lua
new file mode 100644
index 00000000000..fd6eb975c84
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.lua
@@ -0,0 +1,591 @@
+if not modules then modules = { } end modules ['luatex-mplib'] = {
+ version = 1.001,
+ comment = "companion to luatex-mplib.tex",
+ author = "Hans Hagen & Taco Hoekwater",
+ copyright = "ConTeXt Development Team",
+ license = "public domain",
+}
+
+--[[ldx--
+<p>This module is a stripped down version of libraries that are used
+by <l n='context'/>. It can be used in other macro packages and/or
+serve as an example. Embedding in a macro package is up to others and
+normally boils down to inputting <t>supp-mpl.tex</t>.</p>
+--ldx]]--
+
+if metapost and metapost.version then
+
+ --[[ldx--
+ <p>Let's silently quit and make sure that no one loads it
+ manually in <l n='context'/>.</p>
+ --ldx]]--
+
+else
+
+ local format, match, gsub = string.format, string.match, string.gsub
+ local concat = table.concat
+ local abs = math.abs
+
+ local mplib = require ('mplib')
+ local kpse = require ('kpse')
+
+ --[[ldx--
+ <p>We create a namespace and add some variables to it. If the namespace is
+ already defined it will not be initialized. This permits hooking
+ in code beforehand.</p>
+
+ <p>We don't make a format automatically. After all, distributions
+ might have their own preferences and normally a format (mem) file will
+ have some special place in the <l n='tex'/> tree. Also, there can already
+ be format files, different memory settings and other nasty pitfalls that
+ we don't want to interfere with. If you want, you can define a function
+ <t>metapost.make(name,mem_name)</t> that does the job.</p>
+ --ldx]]--
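+
+ -- A minimal sketch of such a hook (assumed code, not part of this module):
+ -- one could predefine the namespace before this file is loaded, e.g.
+ --
+ --   metapost = metapost or { }
+ --   metapost.showlog = true
+ --   -- optionally also provide metapost.make, metapost.finder, ...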
+
+ metapost = metapost or { }
+ metapost.version = 1.00
+ metapost.showlog = metapost.showlog or false
+ metapost.lastlog = ""
+
+ --[[ldx--
+ <p>A few helpers, taken from <t>l-file.lua</t>.</p>
+ --ldx]]--
+
+ local file = file or { }
+
+ function file.replacesuffix(filename, suffix)
+ return (string.gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+ end
+
+ function file.stripsuffix(filename)
+ return (string.gsub(filename,"%.[%a%d]+$",""))
+ end
+
+ --[[ldx--
+ <p>We use the <l n='kpse'/> library unless a finder is already
+ defined.</p>
+ --ldx]]--
+
+ local mpkpse = kpse.new("luatex","mpost")
+
+ metapost.finder = metapost.finder or function(name, mode, ftype)
+ if mode == "w" then
+ return name
+ else
+ return mpkpse:find_file(name,ftype)
+ end
+ end
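+
+ -- An illustrative override (not in the original file): a finder that only
+ -- looks in the current directory could be assigned before loading, e.g.
+ --
+ --   metapost.finder = function(name, mode, ftype)
+ --     if mode == "w" then return name end
+ --     local f = io.open(name) if f then f:close() return name end
+ --   end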
+
+ --[[ldx--
+ <p>You can use your own reporter if needed, as long as it handles multiple
+ arguments and formatted strings.</p>
+ --ldx]]--
+
+ metapost.report = metapost.report or function(...)
+ texio.write(format("<mplib: %s>",format(...)))
+ end
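+
+ -- A custom reporter only needs to accept a format string plus arguments,
+ -- for instance (sketch, not part of the original file):
+ --
+ --   metapost.report = function(fmt,...)
+ --     texio.write_nl("mplib: " .. format(fmt,...))
+ --   end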
+
+ --[[ldx--
+ <p>The rest of this module is not documented. More info can be found in the
+ <l n='luatex'/> manual, articles in user group journals and the files that
+ ship with <l n='context'/>.</p>
+ --ldx]]--
+
+ function metapost.resetlastlog()
+ metapost.lastlog = ""
+ end
+
+ local mplibone = tonumber(mplib.version()) <= 1.50
+
+ if mplibone then
+
+ metapost.make = metapost.make or function(name,mem_name,dump)
+ local t = os.clock()
+ local mpx = mplib.new {
+ ini_version = true,
+ find_file = metapost.finder,
+ job_name = file.stripsuffix(name)
+ }
+ mpx:execute(string.format("input %s ;",name))
+ if dump then
+ mpx:execute("dump ;")
+ metapost.report("format %s made and dumped for %s in %0.3f seconds",mem_name,name,os.clock()-t)
+ else
+ metapost.report("%s read in %0.3f seconds",name,os.clock()-t)
+ end
+ return mpx
+ end
+
+ function metapost.load(name)
+ local mem_name = file.replacesuffix(name,"mem")
+ local mpx = mplib.new {
+ ini_version = false,
+ mem_name = mem_name,
+ find_file = metapost.finder
+ }
+ if not mpx and type(metapost.make) == "function" then
+ -- when i have time i'll locate the format and dump
+ mpx = metapost.make(name,mem_name)
+ end
+ if mpx then
+ metapost.report("using format %s",mem_name,false)
+ return mpx, nil
+ else
+ return nil, { status = 99, error = "out of memory or invalid format" }
+ end
+ end
+
+ else
+
+ local preamble = [[
+ boolean mplib ; mplib := true ;
+ let dump = endinput ;
+ input %s ;
+ ]]
+
+ metapost.make = metapost.make or function()
+ end
+
+ local template = [[
+ \pdfoutput=1
+ \pdfpkresolution600
+ \pdfcompresslevel=9
+ %s\relax
+ \hsize=100in
+ \vsize=\hsize
+ \hoffset=-1in
+ \voffset=\hoffset
+ \topskip=0pt
+ \setbox0=\hbox{%s}\relax
+ \pageheight=\ht0
+ \pagewidth=\wd0
+ \box0
+ \bye
+ ]]
+
+ metapost.texrunner = "mtxrun --script plain"
+
+ local texruns = 0 -- per document
+ local texhash = { } -- per document
+
+ function metapost.maketext(mpd,str,what)
+ -- inefficient but one can always use metafun .. it's more a test
+ -- feature
+ local verbatimtex = mpd.verbatimtex
+ if not verbatimtex then
+ verbatimtex = { }
+ mpd.verbatimtex = verbatimtex
+ end
+ if what == 1 then
+ table.insert(verbatimtex,str)
+ else
+ local texcode = format(template,concat(verbatimtex,"\n"),str)
+ local texdone = texhash[texcode]
+ local jobname = tex.jobname
+ if not texdone then
+ texruns = texruns + 1
+ texdone = texruns
+ texhash[texcode] = texdone
+ local texname = format("%s-mplib-%s.tmp",jobname,texdone)
+ local logname = format("%s-mplib-%s.log",jobname,texdone)
+ local pdfname = format("%s-mplib-%s.pdf",jobname,texdone)
+ io.savedata(texname,texcode)
+ os.execute(format("%s %s",metapost.texrunner,texname))
+ os.remove(texname)
+ os.remove(logname)
+ end
+ return format('"image::%s-mplib-%s.pdf" infont defaultfont',jobname,texdone)
+ end
+ end
+
+ local function mpprint(buffer,...)
+ for i=1,select("#",...) do
+ local value = select(i,...)
+ if value ~= nil then
+ local t = type(value)
+ if t == "number" then
+ buffer[#buffer+1] = format("%.16f",value)
+ elseif t == "string" then
+ buffer[#buffer+1] = value
+ elseif t == "table" then
+ buffer[#buffer+1] = "(" .. concat(value,",") .. ")"
+ else -- boolean or whatever
+ buffer[#buffer+1] = tostring(value)
+ end
+ end
+ end
+ end
+
+ function metapost.runscript(mpd,code)
+ local code = loadstring(code)
+ if type(code) == "function" then
+ local buffer = { }
+ function metapost.print(...)
+ mpprint(buffer,...)
+ end
+ code()
+ -- mpd.buffer = buffer -- for tracing
+ return concat(buffer,"")
+ end
+ return ""
+ end
+
+ function metapost.load(name)
+ local mpd = {
+ buffer = { },
+ verbatim = { }
+ }
+ local mpx = mplib.new {
+ ini_version = true,
+ find_file = metapost.finder,
+ make_text = function(...) return metapost.maketext (mpd,...) end,
+ run_script = function(...) return metapost.runscript(mpd,...) end,
+ extensions = 1,
+ }
+ local result
+ if not mpx then
+ result = { status = 99, error = "out of memory"}
+ else
+ result = mpx:execute(format(preamble, file.replacesuffix(name,"mp")))
+ end
+ metapost.reporterror(result)
+ return mpx, result
+ end
+
+ end
+
+ function metapost.unload(mpx)
+ if mpx then
+ mpx:finish()
+ end
+ end
+
+ function metapost.reporterror(result)
+ if not result then
+ metapost.report("mp error: no result object returned")
+ elseif result.status > 0 then
+ local t, e, l = result.term, result.error, result.log
+ if t then
+ metapost.report("mp terminal: %s",t)
+ end
+ if e then
+ metapost.report("mp error: %s", e)
+ end
+ if not t and not e and l then
+ metapost.lastlog = metapost.lastlog .. "\n " .. l
+ metapost.report("mp log: %s",l)
+ else
+ metapost.report("mp error: unknown, no error, terminal or log messages")
+ end
+ else
+ return false
+ end
+ return true
+ end
+
+ function metapost.process(mpx, data)
+ local converted, result = false, {}
+ mpx = metapost.load(mpx)
+ if mpx and data then
+ local result = mpx:execute(data)
+ if not result then
+ metapost.report("mp error: no result object returned")
+ elseif result.status > 0 then
+ metapost.report("mp error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error"))
+ elseif metapost.showlog then
+ metapost.lastlog = metapost.lastlog .. "\n" .. result.term
+ metapost.report("mp info: %s",result.term or "no-term")
+ elseif result.fig then
+ converted = metapost.convert(result)
+ else
+ metapost.report("mp error: unknown error, maybe no beginfig/endfig")
+ end
+ else
+ metapost.report("mp error: mem file not found")
+ end
+ return converted, result
+ end
+
+ local function getobjects(result,figure,f)
+ return figure:objects()
+ end
+
+ function metapost.convert(result,flusher)
+ metapost.flush(result,flusher)
+ return true -- done
+ end
+
+ --[[ldx--
+ <p>We removed some message and tracing code. We might even remove the flusher.</p>
+ --ldx]]--
+
+ local function pdf_startfigure(n,llx,lly,urx,ury)
+ tex.sprint(format("\\startMPLIBtoPDF{%s}{%s}{%s}{%s}",llx,lly,urx,ury))
+ end
+
+ local function pdf_stopfigure()
+ tex.sprint("\\stopMPLIBtoPDF")
+ end
+
+ function pdf_literalcode(fmt,...) -- table
+ tex.sprint(format("\\MPLIBtoPDF{%s}",format(fmt,...)))
+ end
+
+ function pdf_textfigure(font,size,text,width,height,depth)
+ local how, what = match(text,"^(.-)::(.+)$")
+ if how == "image" then
+ tex.sprint(format("\\MPLIBpdftext{%s}{%s}",what,depth))
+ else
+ text = gsub(text,".","\\hbox{%1}") -- kerning happens in metapost
+ tex.sprint(format("\\MPLIBtextext{%s}{%s}{%s}{%s}",font,size,text,depth))
+ end
+ end
+
+ local bend_tolerance = 131/65536
+
+ local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1
+
+ local function pen_characteristics(object)
+ local t = mplib.pen_info(object)
+ rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty
+ divider = sx*sy - rx*ry
+ return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width
+ end
+
+ local function concat(px, py) -- no tx, ty here
+ return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider
+ end
+
+ local function curved(ith,pth)
+ local d = pth.left_x - ith.right_x
+ if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then
+ d = pth.left_y - ith.right_y
+ if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then
+ return false
+ end
+ end
+ return true
+ end
+
+ local function flushnormalpath(path,open)
+ local pth, ith
+ for i=1,#path do
+ pth = path[i]
+ if not ith then
+ pdf_literalcode("%f %f m",pth.x_coord,pth.y_coord)
+ elseif curved(ith,pth) then
+ pdf_literalcode("%f %f %f %f %f %f c",ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
+ else
+ pdf_literalcode("%f %f l",pth.x_coord,pth.y_coord)
+ end
+ ith = pth
+ end
+ if not open then
+ local one = path[1]
+ if curved(pth,one) then
+ pdf_literalcode("%f %f %f %f %f %f c",pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
+ else
+ pdf_literalcode("%f %f l",one.x_coord,one.y_coord)
+ end
+ elseif #path == 1 then
+ -- special case .. draw point
+ local one = path[1]
+ pdf_literalcode("%f %f l",one.x_coord,one.y_coord)
+ end
+ return t
+ end
+
+ local function flushconcatpath(path,open)
+ pdf_literalcode("%f %f %f %f %f %f cm", sx, rx, ry, sy, tx ,ty)
+ local pth, ith
+ for i=1,#path do
+ pth = path[i]
+ if not ith then
+ pdf_literalcode("%f %f m",concat(pth.x_coord,pth.y_coord))
+ elseif curved(ith,pth) then
+ local a, b = concat(ith.right_x,ith.right_y)
+ local c, d = concat(pth.left_x,pth.left_y)
+ pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(pth.x_coord, pth.y_coord))
+ else
+ pdf_literalcode("%f %f l",concat(pth.x_coord, pth.y_coord))
+ end
+ ith = pth
+ end
+ if not open then
+ local one = path[1]
+ if curved(pth,one) then
+ local a, b = concat(pth.right_x,pth.right_y)
+ local c, d = concat(one.left_x,one.left_y)
+ pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(one.x_coord, one.y_coord))
+ else
+ pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord))
+ end
+ elseif #path == 1 then
+ -- special case .. draw point
+ local one = path[1]
+ pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord))
+ end
+ return t
+ end
+
+ --[[ldx--
+ <p>Support for specials has been removed.</p>
+ --ldx]]--
+
+ function metapost.flush(result,flusher)
+ if result then
+ local figures = result.fig
+ if figures then
+ for f=1, #figures do
+ metapost.report("flushing figure %s",f)
+ local figure = figures[f]
+ local objects = getobjects(result,figure,f)
+ local fignum = tonumber(match(figure:filename(),"([%d]+)$") or figure:charcode() or 0)
+ local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ local bbox = figure:boundingbox()
+ local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] -- faster than unpack
+ if urx < llx then
+ -- invalid
+ pdf_startfigure(fignum,0,0,0,0)
+ pdf_stopfigure()
+ else
+ pdf_startfigure(fignum,llx,lly,urx,ury)
+ pdf_literalcode("q")
+ if objects then
+ for o=1,#objects do
+ local object = objects[o]
+ local objecttype = object.type
+ if objecttype == "start_bounds" or objecttype == "stop_bounds" then
+ -- skip
+ elseif objecttype == "start_clip" then
+ pdf_literalcode("q")
+ flushnormalpath(object.path,t,false)
+ pdf_literalcode("W n")
+ elseif objecttype == "stop_clip" then
+ pdf_literalcode("Q")
+ miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ elseif objecttype == "special" then
+ -- not supported
+ elseif objecttype == "text" then
+ local ot = object.transform -- 3,4,5,6,1,2
+ pdf_literalcode("q %f %f %f %f %f %f cm",ot[3],ot[4],ot[5],ot[6],ot[1],ot[2])
+ pdf_textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
+ pdf_literalcode("Q")
+ else
+ local cs = object.color
+ local cr = false
+ if cs and #cs > 0 then
+ cs, cr = metapost.colorconverter(cs)
+ pdf_literalcode(cs)
+ end
+ local ml = object.miterlimit
+ if ml and ml ~= miterlimit then
+ miterlimit = ml
+ pdf_literalcode("%f M",ml)
+ end
+ local lj = object.linejoin
+ if lj and lj ~= linejoin then
+ linejoin = lj
+ pdf_literalcode("%i j",lj)
+ end
+ local lc = object.linecap
+ if lc and lc ~= linecap then
+ linecap = lc
+ pdf_literalcode("%i J",lc)
+ end
+ local dl = object.dash
+ if dl then
+ local d = format("[%s] %i d",concat(dl.dashes or {}," "),dl.offset)
+ if d ~= dashed then
+ dashed = d
+ pdf_literalcode(dashed)
+ end
+ elseif dashed then
+ pdf_literalcode("[] 0 d")
+ dashed = false
+ end
+ local path = object.path
+ local transformed, penwidth = false, 1
+ local open = path and path[1].left_type and path[#path].right_type
+ local pen = object.pen
+ if pen then
+ if pen.type == 'elliptical' then
+ transformed, penwidth = pen_characteristics(object) -- boolean, value
+ pdf_literalcode("%f w",penwidth)
+ if objecttype == 'fill' then
+ objecttype = 'both'
+ end
+ else -- calculated by mplib itself
+ objecttype = 'fill'
+ end
+ end
+ if transformed then
+ pdf_literalcode("q")
+ end
+ if path then
+ if transformed then
+ flushconcatpath(path,open)
+ else
+ flushnormalpath(path,open)
+ end
+ if objecttype == "fill" then
+ pdf_literalcode("h f")
+ elseif objecttype == "outline" then
+ pdf_literalcode((open and "S") or "h S")
+ elseif objecttype == "both" then
+ pdf_literalcode("h B")
+ end
+ end
+ if transformed then
+ pdf_literalcode("Q")
+ end
+ local path = object.htap
+ if path then
+ if transformed then
+ pdf_literalcode("q")
+ end
+ if transformed then
+ flushconcatpath(path,open)
+ else
+ flushnormalpath(path,open)
+ end
+ if objecttype == "fill" then
+ pdf_literalcode("h f")
+ elseif objecttype == "outline" then
+ pdf_literalcode((open and "S") or "h S")
+ elseif objecttype == "both" then
+ pdf_literalcode("h B")
+ end
+ if transformed then
+ pdf_literalcode("Q")
+ end
+ end
+ if cr then
+ pdf_literalcode(cr)
+ end
+ end
+ end
+ end
+ pdf_literalcode("Q")
+ pdf_stopfigure()
+ end
+ end
+ end
+ end
+ end
+
+ function metapost.colorconverter(cr)
+ local n = #cr
+ if n == 4 then
+ local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
+ return format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k), "0 g 0 G"
+ elseif n == 3 then
+ local r, g, b = cr[1], cr[2], cr[3]
+ return format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b), "0 g 0 G"
+ else
+ local s = cr[1]
+ return format("%.3f g %.3f G",s,s), "0 g 0 G"
+ end
+ end
+
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.tex
new file mode 100644
index 00000000000..f9de4b223f4
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.tex
@@ -0,0 +1,140 @@
+%D \module
+%D [ file=luatex-mplib,
+%D version=2009.12.01,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=\METAPOST\ to \PDF\ conversion,
+%D author=Taco Hoekwater \& Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D This is the companion to the \LUA\ module \type {supp-mpl.lua}. Further
+%D embedding is up to others. A simple example of usage in plain \TEX\ is:
+%D
+%D \starttyping
+%D \pdfoutput=1
+%D
+%D \input luatex-mplib.tex
+%D
+%D \setmplibformat{plain}
+%D
+%D \mplibcode
+%D beginfig(1);
+%D draw fullcircle
+%D scaled 10cm
+%D withcolor red
+%D withpen pencircle xscaled 4mm yscaled 2mm rotated 30 ;
+%D endfig;
+%D \endmplibcode
+%D
+%D \end
+%D \stoptyping
+
+\def\setmplibformat#1{\def\mplibformat{#1}}
+
+\def\setupmplibcatcodes
+ {\catcode`\{=12 % could be optional .. not really needed
+ \catcode`\}=12 % could be optional .. not really needed
+ \catcode`\#=12
+ \catcode`\^=12
+ \catcode`\~=12
+ \catcode`\_=12
+ \catcode`\%=12
+ \catcode`\&=12
+ \catcode`\$=12 }
+
+\def\mplibcode
+ {\bgroup
+ \setupmplibcatcodes
+ \domplibcode}
+
+\long\def\domplibcode#1\endmplibcode
+ {\egroup
+ \directlua{metapost.process('\mplibformat',[[#1]])}}
+
+%D We default to \type {plain} \METAPOST:
+
+\def\mplibformat{plain}
+
+%D We use a dedicated scratchbox:
+
+\ifx\mplibscratchbox\undefined \newbox\mplibscratchbox \fi
+
+%D Now load the needed \LUA\ code.
+
+\directlua{dofile(kpse.find_file('luatex-mplib.lua'))}
+% \directlua{dofile(resolvers.findfile('luatex-mplib.lua'))}
+
+%D The following code takes care of encapsulating the literals:
+
+\def\startMPLIBtoPDF#1#2#3#4%
+ {\hbox\bgroup
+ \xdef\MPllx{#1}\xdef\MPlly{#2}%
+ \xdef\MPurx{#3}\xdef\MPury{#4}%
+ \xdef\MPwidth{\the\dimexpr#3bp-#1bp\relax}%
+ \xdef\MPheight{\the\dimexpr#4bp-#2bp\relax}%
+ \parskip0pt%
+ \leftskip0pt%
+ \parindent0pt%
+ \everypar{}%
+ \setbox\mplibscratchbox\vbox\bgroup
+ \noindent}
+
+\def\stopMPLIBtoPDF
+ {\egroup
+ \setbox\mplibscratchbox\hbox
+ {\hskip-\MPllx bp%
+ \raise-\MPlly bp%
+ \box\mplibscratchbox}%
+ \setbox\mplibscratchbox\vbox to \MPheight
+ {\vfill
+ \hsize\MPwidth
+ \wd\mplibscratchbox0pt%
+ \ht\mplibscratchbox0pt%
+ \dp\mplibscratchbox0pt%
+ \box\mplibscratchbox}%
+ \wd\mplibscratchbox\MPwidth
+ \ht\mplibscratchbox\MPheight
+ \box\mplibscratchbox
+ \egroup}
+
+%D The body of the picture, except for text items, is taken care of by:
+
+\ifnum\pdfoutput>0
+ \let\MPLIBtoPDF\pdfliteral
+\else
+ \def\MPLIBtoPDF#1{\special{pdf:literal direct #1}} % not ok yet
+\fi
+
+%D Text items have a special handler:
+
+\def\MPLIBtextext#1#2#3#4%
+ {\begingroup
+ \setbox\mplibscratchbox\hbox
+ {\font\temp=#1 at #2bp%
+ \temp
+ #3}%
+ \setbox\mplibscratchbox\hbox
+ {\raise#4sp%
+ \box\mplibscratchbox}%
+ \wd\mplibscratchbox0pt%
+ \ht\mplibscratchbox0pt%
+ \dp\mplibscratchbox0pt%
+ \box\mplibscratchbox
+ \endgroup}
+
+\def\MPLIBpdftext#1#2%
+ {\ifcsname mplib::#1\endcsname
+ % already done, forgotten outside convert group
+ \message{<reusing mplib: #1>}%
+ \else
+ \message{<embedding mplib: #1>}%
+ \immediate\pdfximage{#1}% we cannot remove the file as it is included last
+ \expandafter\edef\csname mplib::#1\endcsname{\the\pdflastximage}%
+ \fi
+ \setbox\mplibscratchbox\hbox
+ {\raise#2sp\hbox{\pdfrefximage\csname mplib::#1\endcsname}}%
+ \wd\mplibscratchbox0pt%
+ \ht\mplibscratchbox0pt%
+ \dp\mplibscratchbox0pt%
+ \box\mplibscratchbox}
+
+\endinput
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-plain.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-plain.tex
new file mode 100644
index 00000000000..99347ed2ce5
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-plain.tex
@@ -0,0 +1,53 @@
+%D \module
+%D [ file=luatex-plain,
+%D version=2009.12.01,
+%D title=\LUATEX\ Macros,
+%D subtitle=Plain Format,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+\input plain
+
+\directlua {tex.enableprimitives('', tex.extraprimitives())}
+
+% We assume that pdf is used.
+
+\ifdefined\pdfextension
+ \input luatex-pdf \relax
+\fi
+
+\pdfoutput 1
+
+% We set the page dimensions because otherwise the backend does weird things
+% when we have for instance this on a line of its own:
+%
+% \hbox to 100cm {\hss wide indeed\hss}
+%
+% The page dimension calculation is a fuzzy one as there are some compensations
+% for the \hoffset and \voffset and such. I remember long discussions and much
+% trial and error in figuring this out during pdftex development times. Where
+% a dvi driver will project on a papersize (and thereby clip) the pdf backend
+% has to deal with the lack of a page concept on tex by some guessing. Normally
+% a macro package will set the dimensions to something reasonable anyway.
+
+\pagewidth 8.5in
+\pageheight 11.0in
+
+% We load some code at runtime:
+
+\everyjob \expandafter {%
+ \the\everyjob
+ \input {luatex-basics}%
+ \input {luatex-fonts}%
+ \input {luatex-math}%
+ \input {luatex-languages}%
+ \input {luatex-mplib}%
+ \input {luatex-gadgets}%
+}
+
+% We also patch the version number:
+
+\edef\fmtversion{\fmtversion+luatex}
+
+\dump
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor-test.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor-test.tex
new file mode 100644
index 00000000000..857b28f831b
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor-test.tex
@@ -0,0 +1,30 @@
+\ifdefined\inputpreprocessed
+
+ \def\TestOne[#1]%
+ {test one: [#1]\par}
+
+ \def\TestTwo#some%
+ {test two: #some\par}
+
+ \def\TestThree[#whatever][#more]%
+ {test three: [#more] and [#whatever]\par}
+
+ \def\TestFour[#one]#two%
+ {\def\TestFive[#alpha][#one]%
+ {test four and five: [#one], [#two] and [#alpha]}\par}
+
+ \def\TestSix[#{one}]#{two}%
+ {test six: [#{one}] and #{two}\par}
+
+ \TestOne [one]
+ \TestTwo {one}
+ \TestThree[one][two]
+ \TestFour [one]{two}
+ \TestFive [one][two]
+ \TestSix [one]{two}
+
+\else
+ \input{luatex-preprocessor.tex}
+ \inputpreprocessed{luatex-preprocessor-test.tex}
+ \expandafter \end
+\fi
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.lua
new file mode 100644
index 00000000000..8faa0b47e69
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.lua
@@ -0,0 +1,163 @@
+if not modules then modules = { } end modules ['luatex-preprocessor'] = {
+ version = 1.001,
+ comment = "companion to luatex-preprocessor.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx
+<p>This is a stripped down version of the preprocessor. In
+<l n='context'/> we have a bit more, use a different logger, and
+use a few optimizations. A few examples are shown at the end.</p>
+--ldx]]
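+
+-- For example (illustrative only), a definition with a named parameter such as
+--
+--   \def\test#oeps{test:#oeps}
+--
+-- is rewritten by this preprocessor into the usual numbered form:
+--
+--   \def\test#1{test:#1}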
+
+local rep, sub, gmatch = string.rep, string.sub, string.gmatch
+local insert, remove = table.insert, table.remove
+local setmetatable = setmetatable
+
+local stack, top, n, hashes = { }, nil, 0, { }
+
+local function set(s)
+ if top then
+ n = n + 1
+ if n > 9 then
+ texio.write_nl("number of arguments > 9, ignoring: " .. s)
+ else
+ local ns = #stack
+ local h = hashes[ns]
+ if not h then
+ h = rep("#",ns)
+ hashes[ns] = h
+ end
+ local m = h .. n
+ top[s] = m
+ return m
+ end
+ end
+end
+
+local function get(s)
+ local m = top and top[s] or s
+ return m
+end
+
+local function push()
+ top = { }
+ n = 0
+ local s = stack[#stack]
+ if s then
+ setmetatable(top,{ __index = s })
+ end
+ insert(stack,top)
+end
+
+local function pop()
+ top = remove(stack)
+end
+
+local leftbrace = lpeg.P("{")
+local rightbrace = lpeg.P("}")
+local escape = lpeg.P("\\")
+
+local space = lpeg.P(" ")
+local spaces = space^1
+local newline = lpeg.S("\r\n")
+local nobrace = 1 - leftbrace - rightbrace
+
+local name = lpeg.R("AZ","az")^1
+local longname = (leftbrace/"") * (nobrace^1) * (rightbrace/"")
+local variable = lpeg.P("#") * lpeg.Cs(name + longname)
+local escapedname = escape * name
+local definer = escape * (lpeg.P("def") + lpeg.P("egdx") * lpeg.P("def"))
+local anything = lpeg.P(1)
+local always = lpeg.P(true)
+
+local pushlocal = always / push
+local poplocal = always / pop
+local declaration = variable / set
+local identifier = variable / get
+
+local function matcherror(str,pos)
+ texio.write_nl("runaway definition at: " .. sub(str,pos-30,pos))
+end
+
+local parser = lpeg.Cs { "converter",
+ definition = pushlocal
+ * definer
+ * escapedname
+ * (declaration + (1-leftbrace))^0
+ * lpeg.V("braced")
+ * poplocal,
+ braced = leftbrace
+ * ( lpeg.V("definition")
+ + identifier
+ + lpeg.V("braced")
+ + nobrace
+ )^0
+ * (rightbrace + lpeg.Cmt(always,matcherror)),
+ converter = (lpeg.V("definition") + anything)^1,
+}
+
+--[[ldx
+<p>We provide a few commands.</p>
+--ldx]]
+
+-- local texkpse
+
+local function find_file(...)
+ -- texkpse = texkpse or kpse.new("luatex","tex")
+ -- return texkpse:find_file(...) or ""
+ return kpse.find_file(...) or ""
+end
+
+commands = commands or { }
+
+function commands.preprocessed(str)
+ return lpeg.match(parser,str)
+end
+
+function commands.inputpreprocessed(name)
+ local name = find_file(name) or ""
+ if name ~= "" then
+ -- we could use io.loaddata as it's loaded in luatex-plain
+ local f = io.open(name,'rb')
+ if f then
+ texio.write("("..name)
+ local d = commands.preprocessed(f:read("*a"))
+ if d and d ~= "" then
+ texio.write("processed: " .. name)
+ for s in gmatch(d,"[^\n\r]+") do
+ tex.print(s) -- we do a dumb feedback
+ end
+ end
+ f:close()
+ texio.write(")")
+ else
+ tex.error("preprocessor error, invalid file: " .. name)
+ end
+ else
+ tex.error("preprocessor error, unknown file: " .. name)
+ end
+end
+
+function commands.preprocessfile(oldfile,newfile) -- no checking
+ if oldfile and oldfile ~= newfile then
+ local f = io.open(oldfile,'rb')
+ if f then
+ local g = io.open(newfile,'wb')
+ if g then
+ g:write(lpeg.match(parser,f:read("*a") or ""))
+ g:close()
+ end
+ f:close()
+ end
+ end
+end
+
+--~ print(preprocessed([[\def\test#oeps{test:#oeps}]]))
+--~ print(preprocessed([[\def\test#oeps{test:#{oeps}}]]))
+--~ print(preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]]))
+--~ print(preprocessed([[\def\test#{oeps}{test:#oeps}]]))
+--~ preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]])
+--~ print(preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]]))
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.tex
new file mode 100644
index 00000000000..03b483f4185
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.tex
@@ -0,0 +1,14 @@
+%D \module
+%D [ file=luatex-preprocessor,
+%D version=2010.12.02,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Generic Preprocessor,
+%D author=Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+\directlua{dofile(kpse.find_file('luatex-preprocessor.lua'))}
+
+\def\inputpreprocessed#1%
+ {\directlua{commands.inputpreprocessed("#1")}}
+
+\endinput
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-reference.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-reference.lua
new file mode 100644
index 00000000000..ae366179ce1
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-reference.lua
@@ -0,0 +1,16456 @@
+-- merged file : luatex-fonts-merged.lua
+-- parent file : luatex-fonts.lua
+-- merge date : 11/19/15 19:13:15
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(kind,inspector)
+ inspectors[kind]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ if value==nil then
+ print("nil")
+ else
+ local done=false
+ local kind=type(value)
+ local inspector=inspectors[kind]
+ if inspector then
+ done=inspector(value)
+ if done then
+ break
+ end
+ end
+ for kind,inspector in next,inspectors do
+ done=inspector(value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+local flush=io.flush
+if flush then
+ local execute=os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec=os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn=os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen=io.popen if popen then function io.popen (...) flush() return popen (...) end end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+if setinspector then
+ setinspector("lpeg",function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local sign=S('+-')
+local zero=P('0')
+local digit=R('09')
+local octdigit=R("07")
+local lowercase=R("az")
+local uppercase=R("AZ")
+local underscore=P("_")
+local hexdigit=digit+lowercase+uppercase
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local period=P(".")
+local comma=P(",")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\254\255')
+local utfbom_16_le=P('\255\254')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utfbom_32_be=utfbom_32_be
+patterns.utfbom_32_le=utfbom_32_le
+patterns.utfbom_16_be=utfbom_16_be
+patterns.utfbom_16_le=utfbom_16_le
+patterns.utfbom_8=utfbom_8
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n")
+patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfstricttype=utfstricttype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0)
+local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0)
+local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0)
+local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0)
+local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0)
+local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0)
+patterns.stripper=stripper
+patterns.fullstripper=fullstripper
+patterns.collapser=collapser
+patterns.b_collapser=b_collapser
+patterns.m_collapser=m_collapser
+patterns.e_collapser=e_collapser
+patterns.b_stripper=b_stripper
+patterns.m_stripper=m_stripper
+patterns.e_stripper=e_stripper
+patterns.lowercase=lowercase
+patterns.uppercase=uppercase
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=comma
+patterns.commaspacer=comma*spacer^0
+patterns.period=period
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=underscore
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.digit=digit
+patterns.octdigit=octdigit
+patterns.hexdigit=hexdigit
+patterns.sign=sign
+patterns.cardinal=digit^1
+patterns.integer=sign^-1*digit^1
+patterns.unsigned=digit^0*period*digit^1
+patterns.float=sign^-1*patterns.unsigned
+patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
+patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
+patterns.oct=zero*octdigit^1
+patterns.octal=patterns.oct
+patterns.HEX=zero*P("X")*(digit+uppercase)^1
+patterns.hex=zero*P("x")*(digit+lowercase)^1
+patterns.hexadecimal=zero*S("xX")*hexdigit^1
+patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1
+patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1
+patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
+end
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
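+-- For illustration: lpegmatch(splitat(","),"a,b,c") produces the captures "a","b","c",
+-- while lpegmatch(tsplitat(","),"a,b,c") collects them into { "a", "b", "c" }.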
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
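+-- The helpers below decode one UTF-8 sequence into its code point; the constants are
+-- the folded lead and continuation byte offsets, e.g. 12416 = 0xC0*64 + 0x80, so
+-- c1*64 + c2 - 12416 equals (c1 - 0xC0)*64 + (c2 - 0x80).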
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
+end
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
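+-- For illustration: lpegmatch(lpeg.replacer("foo","bar"),"afoob") gives "abarb", and
+-- lpeg.replacer({ { "<","&lt;" }, { ">","&gt;" } },nil,true) returns a function that
+-- applies both substitutions in a single pass.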
+function lpeg.finder(lst,makefunction,isutf)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
+ else
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=(1-pattern)^0*pattern*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+local splitters_s,splitters_p={},{}
+function lpeg.beforesuffix(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)*pattern*endofstring
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.afterprefix(separator)
+ local splitter=splitters_p[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=pattern*C(anything^0)
+ splitters_p[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
+ end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+end
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
+end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+ new[i]=old[i]
+ end
+ return new
+end
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
+end
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
+ else
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
+ end
+ else
+ if k==v then
+ p=P(k)
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
+end
+local p_false=P(false)
+local p_true=P(true)
+local function make(t)
+ local function making(t)
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ if t[""] then
+ p=p+p_true
+ end
+ return p
+ end
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ return p
+end
+local function collapse(t,x)
+ if type(t)~="table" then
+ return t,x
+ else
+ local n=next(t)
+ if n==nil then
+ return t,x
+ elseif next(t,n)==nil then
+ local k=n
+ local v=t[k]
+ if type(v)=="table" then
+ return collapse(v,x..k)
+ else
+ return v,x..k
+ end
+ else
+ local tt={}
+ for k,v in next,t do
+ local vv,kk=collapse(v,k)
+ tt[kk]=vv
+ end
+ return tt,x
+ end
+ end
+end
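+-- utfchartabletopattern below turns a list (or hash) of utf8 strings into a character
+-- trie and make() compiles that trie into one lpeg pattern, so strings sharing a
+-- prefix are matched without trying each alternative separately.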
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ else
+ for i=1,n do
+ local s=list[i]
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
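+-- nextstep and lpeg.times repeat a pattern exactly n times by repeated doubling: the
+-- rule for step 2^k matches two copies of the rule for step 2^(k-1) (rules are keyed
+-- by the decimal value of the step), rule "1" is the pattern itself, and the start
+-- rule chains the step rules needed to reach n.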
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
+local byte_to_HEX={}
+local byte_to_hex={}
+local byte_to_dec={}
+local hex_to_byte={}
+for i=0,255 do
+ local H=format("%02X",i)
+ local h=format("%02x",i)
+ local d=format("%03i",i)
+ local c=char(i)
+ byte_to_HEX[c]=H
+ byte_to_hex[c]=h
+ byte_to_dec[c]=d
+ hex_to_byte[h]=c
+ hex_to_byte[H]=c
+end
+local hextobyte=P(2)/hex_to_byte
+local bytetoHEX=P(1)/byte_to_HEX
+local bytetohex=P(1)/byte_to_hex
+local bytetodec=P(1)/byte_to_dec
+local hextobytes=Cs(hextobyte^0)
+local bytestoHEX=Cs(bytetoHEX^0)
+local bytestohex=Cs(bytetohex^0)
+local bytestodec=Cs(bytetodec^0)
+patterns.hextobyte=hextobyte
+patterns.bytetoHEX=bytetoHEX
+patterns.bytetohex=bytetohex
+patterns.bytetodec=bytetodec
+patterns.hextobytes=hextobytes
+patterns.bytestoHEX=bytestoHEX
+patterns.bytestohex=bytestohex
+patterns.bytestodec=bytestodec
+function string.toHEX(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+function string.tohex(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+function string.todec(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+function string.tobytes(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
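+-- For illustration: string.tohex("z") gives "7a", string.toHEX("z") gives "7A", and
+-- string.tobytes("7a") maps back to "z"; all four converters are single-pass lpeg
+-- substitutions driven by the lookup tables filled in the loop above.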
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
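+-- For illustration: string.topattern("foo.bar") gives "foo%.bar" and "foo*bar" becomes
+-- "foo.*bar"; with the strict flag set the result is anchored, e.g. "^foo%.bar$".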
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern_c=Ct(C(1)^0)
+local pattern_b=Ct((C(1)/byte)^0)
+function string.totable(str,bytes)
+ return lpegmatch(bytes and pattern_b or pattern_c,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,find,lower,dump=string.format,string.find,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
+function table.strip(tab)
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
+ end
+ end
+ return lst
+end
+function table.keys(t)
+ if t then
+ local keys,k={},0
+ for key in next,t do
+ k=k+1
+ keys[k]=key
+ end
+ return keys
+ else
+ return {}
+ end
+end
+local function compare(a,b)
+ local ta=type(a)
+ if ta=="number" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ elseif tb=="string" then
+ return tostring(a)<b
+ end
+ elseif ta=="string" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ else
+ return a<tostring(b)
+ end
+ end
+ return tostring(a)<tostring(b)
+end
+local function sortedkeys(tab)
+ if tab then
+ local srt,category,s={},0,0
+ for key in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ elseif category==1 then
+ if type(key)~="string" then
+ category=3
+ end
+ elseif category==2 then
+ if type(key)~="number" then
+ category=3
+ end
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=1
+ elseif tkey=="number" then
+ category=2
+ else
+ category=3
+ end
+ end
+ end
+ if s<2 then
+ elseif category==3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt,cmp)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k in next,v do
+ keys[k]=true
+ end
+ end
+ return sortedkeys(keys)
+end
+table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
+table.sortedhashkeys=sortedhashkeys
+local function nothing() end
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local m=#s
+ if m==1 then
+ return next,t
+ elseif m>0 then
+ local n=0
+ return function()
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ end
+ end
+ end
+ return nothing
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
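+-- For illustration: for k,v in table.sortedhash({ b=2, a=1 }) do ... end visits the
+-- keys in the order a, b; a single-entry table falls back to plain next, and an
+-- optional cmp(t,a,b) comparator overrides the default key ordering.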
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.merged(...)
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
+ end
+ end
+ return t
+end
+function table.imerged(...)
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
+ else
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
+end
+function table.tohash(t,value)
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
+ end
+ end
+ return h
+end
+function table.fromhash(t)
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
+local noquotes,hexify,handle,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
+ 'NaN','goto',
+}
+local function simple_table(t)
+ local nt=#t
+ if nt>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==nt then
+ local tt={}
+ for i=1,nt do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ if hexify then
+ tt[i]=format("0x%X",v)
+ else
+ tt[i]=tostring(v)
+ end
+ elseif tv=="string" then
+ tt[i]=format("%q",v)
+ elseif tv=="boolean" then
+ tt[i]=v and "true" or "false"
+ else
+ return nil
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+local propername=patterns.propername
+local function dummy() end
+local function do_serialize(root,name,depth,level,indexed)
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,name and "true" or "false"))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root)~=nil then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local tv=type(v)
+ local tk=type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if tv=="number" then
+ if hexify then
+ handle(format("%s 0x%X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif tv=="string" then
+ handle(format("%s %q,",depth,v))
+ elseif tv=="table" then
+ if next(v)==nil then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif tv=="boolean" then
+ handle(format("%s %s,",depth,v and "true" or "false"))
+ elseif tv=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif tv=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
+ else
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ elseif tv=="table" then
+ if next(v)==nil then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,k and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif tv=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tv=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
+ end
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%X]={",name))
+ else
+ handle("["..name.."]={")
+ end
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
+ end
+ if next(root)~=nil then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
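+-- For illustration, table.serialize({ 1, 2, x = "y" },"t") returns roughly
+--   t={
+--    1,
+--    2,
+--    ["x"]="y",
+--   }
+-- that is, Lua source that rebuilds the table; with the name "return" the result can
+-- be loaded and executed to get the table back directly.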
+table.tohandle=serialize
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ end
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+end
+table.identical=identical
+table.are_equal=are_equal
+local function sparse(old,nest,keeptables)
+ local new={}
+ for k,v in next,old do
+ if not (v=="" or v==false) then
+ if nest and type(v)=="table" then
+ v=sparse(v,nest)
+ if keeptables or next(v)~=nil then
+ new[k]=v
+ end
+ else
+ new[k]=v
+ end
+ end
+ end
+ return new
+end
+table.sparse=sparse
+function table.compact(t)
+ return sparse(t,true,true)
+end
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
+ end
+ end
+ return false
+end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
+end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
+ end
+ return t
+ end
+end
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+if setinspector then
+ setinspector("table",function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+function table.is_empty(t)
+ return not t or next(t)==nil
+end
+function table.has_one_entry(t)
+ return t and next(t,next(t))==nil
+end
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
+end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
+ end
+ end
+ return new
+end
+function table.sorted(t,...)
+ sort(t,...)
+ return t
+end
+function table.values(t,s)
+ if t then
+ local values,keys,v={},{},0
+ for key,value in next,t do
+ if not keys[value] then
+ v=v+1
+ values[v]=value
+ keys[value]=key
+ end
+ end
+ if s then
+ sort(values)
+ end
+ return values
+ else
+ return {}
+ end
+end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ n=n+1
+ local k=s[n]
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return kv,s
+ else
+ local n=next(t)
+ local function iterator()
+ while n~=nil do
+ local k=n
+ n=next(t,k)
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return iterator,t
+ end
+ else
+ return nothing
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";",1,true) then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
+end
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ local step
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
+ else
+ step=floor(size/(1024*1024))*1024*1024/8
+ end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
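+-- The second readall shadows the first: files below 1MB are read in a single call,
+-- larger files in chunks whose size scales with the file size and is capped at 16MB,
+-- and the chunks are concatenated afterwards.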
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
+end
+function io.noflines(f)
+ if type(f)=="string" then
+ local f=io.open(f)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
+}
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
+}
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
+end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
+ end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
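+-- readnumber below reads unsigned big-endian integers of 1 to 4 bytes from an open
+-- file handle, negative counts read the same sizes little-endian, and an optional
+-- third argument makes it seek to an absolute offset first (readnumber(f,offset,n)).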
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+local insert,concat=table.insert,table.concat
+local match,find,gmatch=string.match,string.find,string.gmatch
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local tricky=S("/\\")*P(-1)
+local attributes=lfs.attributes
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir,"lfs.isdir")
+end
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+end
+function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+end
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("")
+local function suffixesonly(name)
+ if name then
+ return lpegmatch(pattern,name)
+ else
+ return ""
+ end
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.suffixesonly=suffixesonly
+file.suffixes=suffixesonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ if str then
+ return lpegmatch(pattern_d,str)
+ else
+ return "",str
+ end
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
+ return filename
+ end
+ end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename).."."..suffix
+ end
+end
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
+end
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+function file.is_writable(name)
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local someslash=S("\\/")
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local reslasher=lpeg.replacer(S("\\/"),"/")
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(one,two,three,...)
+ if not two then
+ return one=="" and one or lpegmatch(stripper,one)
+ end
+ if one=="" then
+ return lpegmatch(stripper,three and concat({ two,three,... },"/") or two)
+ end
+ if lpegmatch(isnetwork,one) then
+ local one=lpegmatch(reslasher,one)
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
+ if lpegmatch(hasroot,two) then
+ return one..two
+ else
+ return one.."/"..two
+ end
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/"..two
+ end
+ else
+ return lpegmatch(deslasher,concat({ one,two,three,... },"/"))
+ end
+end
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0)
+local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//")
+local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
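+-- For illustration: file.collapsepath("a/b/../c") gives "a/c" and
+-- file.collapsepath("a\\b\\c") gives "a/b/c"; with anchor set to true a relative path
+-- is first prefixed with the current directory before being normalized.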
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor==true and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ newelements=concat(newelements,'/')
+ if anchor=="." and find(str,"^%./") then
+ return "./"..newelements
+ else
+ return newelements
+ end
+ end
+end
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
+function file.robustname(str,strict)
+ if str then
+ str=lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str
+ else
+ return str
+ end
+ end
+end
+file.readdata=io.loaddata
+file.savedata=io.savedata
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename)~=nil
+end
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename)~=nil
+end
+function file.strip(name,dir)
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
+end
+function lfs.mkdirs(path)
+ local full=""
+ for sub in gmatch(path,"(/*[^\\/]+)") do
+ full=full..sub
+ lfs.mkdir(full)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
+end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+function string.is_boolean(str,default,strict)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then
+ return false
+ end
+ end
+ return default
+end
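+-- Usage sketch (illustrative):
+--   toboolean("true")          --> true
+--   toboolean("1")             --> false (non keyword strings need tolerant=true)
+--   toboolean("1",true)        --> true
+--   string.is_boolean("off")   --> false
+--   string.is_boolean("maybe") --> nil (the default argument)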
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.ceiling then
+ math.ceiling=math.ceil
+end
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
+end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+if not math.mod then
+ function math.mod(n,m) return n%m end
+end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
+end
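+-- These only fill in helpers missing from stock Lua, e.g. math.round(2.5) --> 3,
+-- math.div(7,2) --> 3, math.sind(90) --> 1 (degree based) and math.odd(3) --> true.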
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local spaceortab=patterns.spaceortab
+local function points(n)
+ n=tonumber(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ n=tonumber(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
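+-- Both take a size in scaled points (sp), e.g. number.points(65536) --> "1pt"
+-- and number.basepoints(65536) --> roughly "0.99626bp" (trailing zeros stripped).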
+local rubish=spaceortab^0*newline
+local anyrubish=spaceortab+newline
+local anything=patterns.anything
+local stripped=(spaceortab^1/"")*newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
+}
+setmetatable(striplinepatterns,{ __index=function(t,k) return p_prune_collapse end })
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
+end
+strings.striplong=strings.striplines
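+-- strings.striplines(str,how) picks one of the patterns above; an unknown (or
+-- missing) how falls back to "prune and collapse" via the metatable, i.e. leading
+-- and trailing spaces per line are pruned and runs of empty lines are collapsed
+-- to at most one.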
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={ [0]=
+ "[null]","[soh]","[stx]","[etx]","[eot]","[enq]","[ack]","[bel]",
+ "[bs]","[ht]","[lf]","[vt]","[ff]","[cr]","[so]","[si]",
+ "[dle]","[dc1]","[dc2]","[dc3]","[dc4]","[nak]","[syn]","[etb]",
+ "[can]","[em]","[sub]","[esc]","[fs]","[gs]","[rs]","[us]",
+ "[space]",
+}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format("%05X",b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..(c and format("%05X",c) or "?????")..")")
+ end
+end
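+-- string.tracedchar shows control characters by name, e.g. tracedchar(10) -->
+-- "[lf]" while tracedchar(65) --> "A (U+00041)".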
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local zero=P("0")^1/""
+local plus=P("+")/""
+local minus=P("-")
+local separator=S(".")
+local digit=R("09")
+local trailing=zero^1*#S("eE")
+local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1))
+local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent)
+local pattern_b=Cs((exponent+P(1))^0)
+function number.sparseexponent(f,n)
+ if not n then
+ n=f
+ f="%e"
+ end
+ local tn=type(n)
+ if tn=="string" then
+ local m=tonumber(n)
+ if m then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m))
+ end
+ elseif tn=="number" then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n))
+ end
+ return tostring(n)
+end
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("format('%%i',a%s)",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_F=function(f)
+ n=n+1
+ if not f or f=="" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_j=function(f)
+ n=n+1
+ return format("sparseexponent('%%%se',a%s)",f,n)
+end
+local format_J=function(f)
+ n=n+1
+ return format("sparseexponent('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local digit=patterns.digit
+local period=patterns.period
+local three=digit*digit*digit
+local splitter=Cs (
+ (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2)
+)
+patterns.formattednumber=splitter
+function number.formatted(n,sep1,sep2)
+ local s=type(n)=="string" and n or format("%0.2f",n)
+ if sep1==true then
+ return lpegmatch(splitter,s,1,".",",")
+ elseif sep1=="." then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ",")
+ elseif sep1=="," then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ".")
+ else
+ return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
+ end
+end
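+-- Illustrative: number.formatted(1234567.891) --> "1,234,567.89" (grouped with
+-- "," and rounded to two decimals); sep1/sep2 select or swap the two separators.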
+local format_m=function(f)
+ n=n+1
+ if not f or f=="" then
+ f=","
+ end
+ return format([[formattednumber(a%s,%q,".")]],n,f)
+end
+local format_M=function(f)
+ n=n+1
+ if not f or f=="" then
+ f="."
+ end
+ return format([[formattednumber(a%s,%q,",")]],n,f)
+end
+local format_z=function(f)
+ n=n+(tonumber(f) or 1)
+ return "''"
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w")
++V("W")
++V("a")
++V("A")
++V("j")+V("J")
++V("m")+V("M")
++V("z")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_any*P("l"))/format_l,
+ ["L"]=(prefix_any*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["j"]=(prefix_any*P("j"))/format_j,
+ ["J"]=(prefix_any*P("J"))/format_J,
+ ["m"]=(prefix_tab*P("m"))/format_m,
+ ["M"]=(prefix_tab*P("M"))/format_M,
+ ["z"]=(prefix_any*P("z"))/format_z,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p,t._environment_)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
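+-- The formatters table compiles a template once into a plain Lua function, e.g.
+-- string.formatters["%s = %p"]("width",65536) builds (roughly)
+--   return function(a1,a2) return (a1 or '').." = "..points(a2) end
+-- and yields "width = 1pt"; string.formatter(fmt,...) is the one-shot variant.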
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if type(preamble)=="string" then
+ t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
+ end
+ end
+end
+strings.formatters.add=add
+patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
+patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
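+-- string.optionalquoted only adds double quotes when the string contains a space
+-- (already quoted strings are kept), e.g. 'my file.tex' --> '"my file.tex"' while
+-- 'file.tex' is returned unchanged.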
+local pattern=Cs((newline/(os.newline or "\r")+1)^0)
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luat-basics-gen']={
+ version=1.100,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local dummyfunction=function()
+end
+local dummyreporter=function(c)
+ return function(f,...)
+ local r=texio.reporter or texio.write_nl
+ if f then
+ r(c.." : "..string.formatters(f,...))
+ else
+ r("")
+ end
+ end
+end
+statistics={
+ register=dummyfunction,
+ starttiming=dummyfunction,
+ stoptiming=dummyfunction,
+ elapsedtime=nil,
+}
+directives={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+trackers={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+experiments={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+storage={
+ register=dummyfunction,
+ shared={},
+}
+logs={
+ new=dummyreporter,
+ reporter=dummyreporter,
+ messenger=dummyreporter,
+ report=dummyfunction,
+}
+callbacks={
+ register=function(n,f) return callback.register(n,f) end,
+}
+utilities={
+ storage={
+ allocate=function(t) return t or {} end,
+ mark=function(t) return t or {} end,
+ },
+}
+characters=characters or {
+ data={}
+}
+texconfig.kpse_init=true
+resolvers=resolvers or {}
+local remapper={
+ otf="opentype fonts",
+ ttf="truetype fonts",
+ ttc="truetype fonts",
+ dfont="truetype fonts",
+ cid="cid maps",
+ cidmap="cid maps",
+ fea="font feature files",
+ pfa="type1 fonts",
+ pfb="type1 fonts",
+ afm="afm",
+}
+function resolvers.findfile(name,fileformat)
+ name=string.gsub(name,"\\","/")
+ if not fileformat or fileformat=="" then
+ fileformat=file.suffix(name)
+ if fileformat=="" then
+ fileformat="tex"
+ end
+ end
+ fileformat=string.lower(fileformat)
+ fileformat=remapper[fileformat] or fileformat
+ local found=kpse.find_file(name,fileformat)
+ if not found or found=="" then
+ found=kpse.find_file(name,"other text files")
+ end
+ return found
+end
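+-- This generic resolver just maps suffixes onto kpse file formats, e.g.
+-- resolvers.findfile("lmroman10-regular.otf") asks kpse for "opentype fonts"
+-- and falls back to "other text files" when that lookup fails.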
+resolvers.findbinfile=resolvers.findfile
+function resolvers.loadbinfile(filename,filetype)
+ local data=io.loaddata(filename)
+ return true,data,#data
+end
+function resolvers.resolve(s)
+ return s
+end
+function resolvers.unresolve(s)
+ return s
+end
+caches={}
+local writable=nil
+local readables={}
+local usingjit=jit
+if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then
+ caches.namespace='generic'
+end
+do
+ local cachepaths=kpse.expand_var('$TEXMFCACHE') or ""
+ if cachepaths=="" or cachepaths=="$TEXMFCACHE" then
+ cachepaths=kpse.expand_var('$TEXMFVAR') or ""
+ end
+ if cachepaths=="" or cachepaths=="$TEXMFVAR" then
+ cachepaths=kpse.expand_var('$VARTEXMF') or ""
+ end
+ if cachepaths=="" then
+ local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+ for i=1,#fallbacks do
+ cachepaths=os.getenv(fallbacks[i]) or ""
+ if cachepaths~="" and lfs.isdir(cachepaths) then
+ break
+ end
+ end
+ end
+ if cachepaths=="" then
+ cachepaths="."
+ end
+ cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":")
+ for i=1,#cachepaths do
+ local cachepath=cachepaths[i]
+ if not lfs.isdir(cachepath) then
+ lfs.mkdirs(cachepath)
+ if lfs.isdir(cachepath) then
+ texio.write(string.format("(created cache path: %s)",cachepath))
+ end
+ end
+ if file.is_writable(cachepath) then
+ writable=file.join(cachepath,"luatex-cache")
+ lfs.mkdir(writable)
+ writable=file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+ if not writable then
+ texio.write_nl("quiting: fix your writable cache path")
+ os.exit()
+ elseif #readables==0 then
+ texio.write_nl("quiting: fix your readable cache path")
+ os.exit()
+ elseif #readables==1 and readables[1]==writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables," ")))
+ end
+end
+function caches.getwritablepath(category,subcategory)
+ local path=file.join(writable,category)
+ lfs.mkdir(path)
+ path=file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+function caches.getreadablepaths(category,subcategory)
+ local t={}
+ for i=1,#readables do
+ t[i]=file.join(readables[i],category,subcategory)
+ end
+ return t
+end
+local function makefullname(path,name)
+ if path and path~="" then
+ return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc")
+ end
+end
+function caches.is_writable(path,name)
+ local fullname=makefullname(path,name)
+ return fullname and file.is_writable(fullname)
+end
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local data=false
+ local luaname,lucname=makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
+ if lucname and lfs.isfile(lucname) then
+ texio.write(string.format("(load luc: %s)",lucname))
+ data=loadfile(lucname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
+ end
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ end
+ end
+ end
+end
+function caches.savedata(path,name,data)
+ local luaname,lucname=makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true)
+ if lucname and type(caches.compile)=="function" then
+ os.remove(lucname)
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
+function caches.compile(data,luaname,lucname)
+ local d=io.loaddata(luaname)
+ if not d or d=="" then
+ d=table.serialize(data,true)
+ end
+ if d and d~="" then
+ local f=io.open(lucname,'wb')
+ if f then
+ local s=loadstring(d)
+ if s then
+ f:write(string.dump(s,true))
+ end
+ f:close()
+ end
+ end
+end
+function table.setmetatableindex(t,f)
+ if type(t)~="table" then
+ f=f or t
+ t={}
+ end
+ setmetatable(t,{ __index=f })
+ return t
+end
+arguments={}
+if arg then
+ for i=1,#arg do
+ local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$")
+ if k and v then
+ arguments[k]=v
+ end
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
+ end
+ return s
+ end
+end
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
+end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
+end
+function containers.read(container,name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
+end
+function containers.content(container,name)
+ return container.storage[name]
+end
+function containers.cleanname(name)
+ return (gsub(lower(name),"[^%w\128-\255]+","-"))
+end
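+-- Typical use (illustrative, hypothetical names): a loader defines a container
+-- once and then round-trips through read/write, e.g.
+--   local cache = containers.define("fonts","demo",1.001,true)
+--   local data  = containers.read(cache,"somefont")
+--             or containers.write(cache,"somefont",{ cachedata })
+-- where read only returns data whose cache_version matches the container version.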
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-nod']={
+ version=1.001,
+ comment="companion to luatex-fonts.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+if tex.attribute[0]~=0 then
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+ tex.attribute[0]=0
+end
+attributes=attributes or {}
+attributes.unsetvalue=-0x7FFFFFFF
+local numbers,last={},127
+attributes.private=attributes.private or function(name)
+ local number=numbers[name]
+ if not number then
+ if last<255 then
+ last=last+1
+ end
+ number=last
+ numbers[name]=number
+ end
+ return number
+end
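+-- attributes.private hands out attribute numbers above 127 for named purposes,
+-- e.g. attributes.private("myfeature") --> 128 on the first call (hypothetical
+-- name) and the same number on later calls with that name.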
+nodes={}
+nodes.pool={}
+nodes.handlers={}
+local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
+local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
+local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" }
+for i=0,#glyphcodes do glyphcodes[glyphcodes[i]]=i end
+for i=0,#disccodes do disccodes [disccodes [i]]=i end
+nodes.nodecodes=nodecodes
+nodes.whatcodes=whatcodes
+nodes.whatsitcodes=whatcodes
+nodes.glyphcodes=glyphcodes
+nodes.disccodes=disccodes
+local free_node=node.free
+local remove_node=node.remove
+local new_node=node.new
+local traverse_id=node.traverse_id
+nodes.handlers.protectglyphs=node.protect_glyphs
+nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+local math_code=nodecodes.math
+local end_of_math=node.end_of_math
+function node.end_of_math(n)
+ if n.id==math_code and n.subtype==1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+function nodes.remove(head,current,free_too)
+ local t=current
+ head,current=remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t=nil
+ else
+ t.next,t.prev=nil,nil
+ end
+ end
+ return head,current,t
+end
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+function nodes.pool.kern(k)
+ local n=new_node("kern",1)
+ n.kern=k
+ return n
+end
+local getfield=node.getfield
+local setfield=node.setfield
+nodes.getfield=getfield
+nodes.setfield=setfield
+nodes.getattr=getfield
+nodes.setattr=setfield
+nodes.tostring=node.tostring or tostring
+nodes.copy=node.copy
+nodes.copy_list=node.copy_list
+nodes.delete=node.delete
+nodes.dimensions=node.dimensions
+nodes.end_of_math=node.end_of_math
+nodes.flush_list=node.flush_list
+nodes.flush_node=node.flush_node
+nodes.free=node.free
+nodes.insert_after=node.insert_after
+nodes.insert_before=node.insert_before
+nodes.hpack=node.hpack
+nodes.new=node.new
+nodes.tail=node.tail
+nodes.traverse=node.traverse
+nodes.traverse_id=node.traverse_id
+nodes.slide=node.slide
+nodes.vpack=node.vpack
+nodes.first_glyph=node.first_glyph
+nodes.first_character=node.first_character
+nodes.has_glyph=node.has_glyph or node.first_glyph
+nodes.current_attr=node.current_attr
+nodes.do_ligature_n=node.do_ligature_n
+nodes.has_field=node.has_field
+nodes.last_node=node.last_node
+nodes.usedlist=node.usedlist
+nodes.protrusion_skippable=node.protrusion_skippable
+nodes.write=node.write
+nodes.has_attribute=node.has_attribute
+nodes.set_attribute=node.set_attribute
+nodes.unset_attribute=node.unset_attribute
+nodes.protect_glyphs=node.protect_glyphs
+nodes.unprotect_glyphs=node.unprotect_glyphs
+nodes.mlist_to_hlist=node.mlist_to_hlist
+local direct=node.direct
+local nuts={}
+nodes.nuts=nuts
+local tonode=direct.tonode
+local tonut=direct.todirect
+nodes.tonode=tonode
+nodes.tonut=tonut
+nuts.tonode=tonode
+nuts.tonut=tonut
+local getfield=direct.getfield
+local setfield=direct.setfield
+nuts.getfield=getfield
+nuts.setfield=setfield
+nuts.getnext=direct.getnext
+nuts.getprev=direct.getprev
+nuts.getid=direct.getid
+nuts.getattr=getfield
+nuts.setattr=setfield
+nuts.getfont=direct.getfont
+nuts.getsubtype=direct.getsubtype
+nuts.getchar=direct.getchar
+nuts.insert_before=direct.insert_before
+nuts.insert_after=direct.insert_after
+nuts.delete=direct.delete
+nuts.copy=direct.copy
+nuts.copy_list=direct.copy_list
+nuts.tail=direct.tail
+nuts.flush_list=direct.flush_list
+nuts.free=direct.free
+nuts.remove=direct.remove
+nuts.is_node=direct.is_node
+nuts.end_of_math=direct.end_of_math
+nuts.traverse=direct.traverse
+nuts.traverse_id=direct.traverse_id
+nuts.getprop=nuts.getattr
+nuts.setprop=nuts.setattr
+local new_nut=direct.new
+nuts.new=new_nut
+nuts.pool={}
+function nuts.pool.kern(k)
+ local n=new_nut("kern",1)
+ setfield(n,"kern",k)
+ return n
+end
+local propertydata=direct.get_properties_table()
+nodes.properties={ data=propertydata }
+direct.set_properties_mode(true,true)
+function direct.set_properties_mode() end
+nuts.getprop=function(n,k)
+ local p=propertydata[n]
+ if p then
+ return p[k]
+ end
+end
+nuts.setprop=function(n,k,v)
+ if v then
+ local p=propertydata[n]
+ if p then
+ p[k]=v
+ else
+ propertydata[n]={ [k]=v }
+ end
+ end
+end
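+-- getprop/setprop keep per-node data in the shared properties table rather than
+-- in attributes, e.g. (sketch, g being some glyph nut):
+--   nuts.setprop(g,"injections",{ leftkern=100 })
+--   nuts.getprop(g,"injections") --> that same table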
+nodes.setprop=nodes.setproperty
+nodes.getprop=nodes.getproperty
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ini']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local report_defining=logs.reporter("fonts","defining")
+fonts=fonts or {}
+local fonts=fonts
+fonts.hashes={ identifiers=allocate() }
+fonts.tables=fonts.tables or {}
+fonts.helpers=fonts.helpers or {}
+fonts.tracers=fonts.tracers or {}
+fonts.specifiers=fonts.specifiers or {}
+fonts.analyzers={}
+fonts.readers={}
+fonts.definers={ methods={} }
+fonts.loggers={ register=function() end }
+fontloader.totable=fontloader.to_table
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-con']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,tostring,rawget=next,tostring,rawget
+local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub
+local utfbyte=utf.byte
+local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy
+local derivetable=table.derive
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end)
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local constructors=fonts.constructors or {}
+fonts.constructors=constructors
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+constructors.dontembed=allocate()
+constructors.autocleanup=true
+constructors.namemode="fullpath"
+constructors.version=1.01
+constructors.cache=containers.define("fonts","constructors",constructors.version,false)
+constructors.privateoffset=0xF0000
+constructors.cacheintex=true
+constructors.keys={
+ properties={
+ encodingbytes="number",
+ embedding="number",
+ cidinfo={},
+ format="string",
+ fontname="string",
+ fullname="string",
+ filename="filename",
+ psname="string",
+ name="string",
+ virtualized="boolean",
+ hasitalics="boolean",
+ autoitalicamount="basepoints",
+ nostackmath="boolean",
+ noglyphnames="boolean",
+ mode="string",
+ hasmath="boolean",
+ mathitalics="boolean",
+ textitalics="boolean",
+ finalized="boolean",
+ },
+ parameters={
+ mathsize="number",
+ scriptpercentage="float",
+ scriptscriptpercentage="float",
+ units="cardinal",
+ designsize="scaledpoints",
+ expansion={
+ stretch="integerscale",
+ shrink="integerscale",
+ step="integerscale",
+ auto="boolean",
+ },
+ protrusion={
+ auto="boolean",
+ },
+ slantfactor="float",
+ extendfactor="float",
+ factor="float",
+ hfactor="float",
+ vfactor="float",
+ size="scaledpoints",
+ units="scaledpoints",
+ scaledpoints="scaledpoints",
+ slantperpoint="scaledpoints",
+ spacing={
+ width="scaledpoints",
+ stretch="scaledpoints",
+ shrink="scaledpoints",
+ extra="scaledpoints",
+ },
+ xheight="scaledpoints",
+ quad="scaledpoints",
+ ascender="scaledpoints",
+ descender="scaledpoints",
+ synonyms={
+ space="spacing.width",
+ spacestretch="spacing.stretch",
+ spaceshrink="spacing.shrink",
+ extraspace="spacing.extra",
+ x_height="xheight",
+ space_stretch="spacing.stretch",
+ space_shrink="spacing.shrink",
+ extra_space="spacing.extra",
+ em="quad",
+ ex="xheight",
+ slant="slantperpoint",
+ },
+ },
+ description={
+ width="basepoints",
+ height="basepoints",
+ depth="basepoints",
+ boundingbox={},
+ },
+ character={
+ width="scaledpoints",
+ height="scaledpoints",
+ depth="scaledpoints",
+ italic="scaledpoints",
+ },
+}
+local designsizes=allocate()
+constructors.designsizes=designsizes
+local loadedfonts=allocate()
+constructors.loadedfonts=loadedfonts
+local factors={
+ pt=65536.0,
+ bp=65781.8,
+}
+function constructors.setfactor(f)
+ constructors.factor=factors[f or 'pt'] or factors.pt
+end
+constructors.setfactor()
+function constructors.scaled(scaledpoints,designsize)
+ if scaledpoints<0 then
+ local factor=constructors.factor
+ if designsize then
+ if designsize>factor then
+ return (- scaledpoints/1000)*designsize
+ else
+ return (- scaledpoints/1000)*designsize*factor
+ end
+ else
+ return (- scaledpoints/1000)*10*factor
+ end
+ else
+ return scaledpoints
+ end
+end
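+-- Negative sizes mean "per mille of the design size", e.g. with a 10pt design
+-- size constructors.scaled(-1200,10*65536) --> 12*65536 (12pt), while positive
+-- values are already scaled points and pass through unchanged.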
+function constructors.cleanuptable(tfmdata)
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k,v in next,tfmdata.characters do
+ if v.commands then v.commands=nil end
+ end
+ end
+end
+function constructors.calculatescale(tfmdata,scaledpoints)
+ local parameters=tfmdata.parameters
+ if scaledpoints<0 then
+ scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize)
+ end
+ return scaledpoints,scaledpoints/(parameters.units or 1000)
+end
+local unscaled={
+ ScriptPercentScaleDown=true,
+ ScriptScriptPercentScaleDown=true,
+ RadicalDegreeBottomRaisePercent=true
+}
+function constructors.assignmathparameters(target,original)
+ local mathparameters=original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters=target.parameters
+ local targetproperties=target.properties
+ local targetmathparameters={}
+ local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name,value in next,mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name]=value
+ else
+ targetmathparameters[name]=value*factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size
+ end
+ target.mathparameters=targetmathparameters
+ end
+end
+function constructors.beforecopyingcharacters(target,original)
+end
+function constructors.aftercopyingcharacters(target,original)
+end
+constructors.sharefonts=false
+constructors.nofsharedfonts=0
+local sharednames={}
+function constructors.trytosharefont(target,tfmdata)
+ if constructors.sharefonts then
+ local characters=target.characters
+ local n=1
+ local t={ target.psname }
+ local u=sortedkeys(characters)
+ for i=1,#u do
+ local k=u[i]
+ n=n+1;t[n]=k
+ n=n+1;t[n]=characters[k].index or k
+ end
+ local h=md5.HEX(concat(t," "))
+ local s=sharednames[h]
+ if s then
+ if trace_defining then
+ report_defining("font %a uses backend resources of font %a",target.fullname,s)
+ end
+ target.fullname=s
+ constructors.nofsharedfonts=constructors.nofsharedfonts+1
+ target.properties.sharedwith=s
+ else
+ sharednames[h]=target.fullname
+ end
+ end
+end
+function constructors.enhanceparameters(parameters)
+ local xheight=parameters.x_height
+ local quad=parameters.quad
+ local space=parameters.space
+ local stretch=parameters.space_stretch
+ local shrink=parameters.space_shrink
+ local extra=parameters.extra_space
+ local slant=parameters.slant
+ parameters.xheight=xheight
+ parameters.spacestretch=stretch
+ parameters.spaceshrink=shrink
+ parameters.extraspace=extra
+ parameters.em=quad
+ parameters.ex=xheight
+ parameters.slantperpoint=slant
+ parameters.spacing={
+ width=space,
+ stretch=stretch,
+ shrink=shrink,
+ extra=extra,
+ }
+end
+function constructors.scale(tfmdata,specification)
+ local target={}
+ if tonumber(specification) then
+ specification={ size=specification }
+ end
+ target.specification=specification
+ local scaledpoints=specification.size
+ local relativeid=specification.relativeid
+ local properties=tfmdata.properties or {}
+ local goodies=tfmdata.goodies or {}
+ local resources=tfmdata.resources or {}
+ local descriptions=tfmdata.descriptions or {}
+ local characters=tfmdata.characters or {}
+ local changed=tfmdata.changed or {}
+ local shared=tfmdata.shared or {}
+ local parameters=tfmdata.parameters or {}
+ local mathparameters=tfmdata.mathparameters or {}
+ local targetcharacters={}
+ local targetdescriptions=derivetable(descriptions)
+ local targetparameters=derivetable(parameters)
+ local targetproperties=derivetable(properties)
+ local targetgoodies=goodies
+ target.characters=targetcharacters
+ target.descriptions=targetdescriptions
+ target.parameters=targetparameters
+ target.properties=targetproperties
+ target.goodies=targetgoodies
+ target.shared=shared
+ target.resources=resources
+ target.unscaled=tfmdata
+ local mathsize=tonumber(specification.mathsize) or 0
+ local textsize=tonumber(specification.textsize) or scaledpoints
+ local forcedsize=tonumber(parameters.mathsize ) or 0
+ local extrafactor=tonumber(specification.factor ) or 1
+ if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
+ scaledpoints=parameters.scriptpercentage*textsize/100
+ elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then
+ scaledpoints=parameters.scriptscriptpercentage*textsize/100
+ elseif forcedsize>1000 then
+ scaledpoints=forcedsize
+ end
+ targetparameters.mathsize=mathsize
+ targetparameters.textsize=textsize
+ targetparameters.forcedsize=forcedsize
+ targetparameters.extrafactor=extrafactor
+ local tounicode=fonts.mappings.tounicode
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local units=parameters.units or 1000
+ if target.fonts then
+ target.fonts=fastcopy(target.fonts)
+ end
+ targetproperties.language=properties.language or "dflt"
+ targetproperties.script=properties.script or "dflt"
+ targetproperties.mode=properties.mode or "base"
+ local askedscaledpoints=scaledpoints
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification)
+ local hdelta=delta
+ local vdelta=delta
+ target.designsize=parameters.designsize
+ target.units=units
+ target.units_per_em=units
+ local direction=properties.direction or tfmdata.direction or 0
+ target.direction=direction
+ properties.direction=direction
+ target.size=scaledpoints
+ target.encodingbytes=properties.encodingbytes or 1
+ target.embedding=properties.embedding or "subset"
+ target.tounicode=1
+ target.cidinfo=properties.cidinfo
+ target.format=properties.format
+ target.cache=constructors.cacheintex and "yes" or "renew"
+ local fontname=properties.fontname or tfmdata.fontname
+ local fullname=properties.fullname or tfmdata.fullname
+ local filename=properties.filename or tfmdata.filename
+ local psname=properties.psname or tfmdata.psname
+ local name=properties.name or tfmdata.name
+ if not psname or psname=="" then
+ psname=fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname=fontname
+ target.fullname=fullname
+ target.filename=filename
+ target.psname=psname
+ target.name=name
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.filename=filename
+ properties.psname=psname
+ properties.name=name
+ local expansion=parameters.expansion
+ if expansion then
+ target.stretch=expansion.stretch
+ target.shrink=expansion.shrink
+ target.step=expansion.step
+ target.auto_expand=expansion.auto
+ end
+ local protrusion=parameters.protrusion
+ if protrusion then
+ target.auto_protrude=protrusion.auto
+ end
+ local extendfactor=parameters.extendfactor or 0
+ if extendfactor~=0 and extendfactor~=1 then
+ hdelta=hdelta*extendfactor
+ target.extend=extendfactor*1000
+ else
+ target.extend=1000
+ end
+ local slantfactor=parameters.slantfactor or 0
+ if slantfactor~=0 then
+ target.slant=slantfactor*1000
+ else
+ target.slant=0
+ end
+ targetparameters.factor=delta
+ targetparameters.hfactor=hdelta
+ targetparameters.vfactor=vdelta
+ targetparameters.size=scaledpoints
+ targetparameters.units=units
+ targetparameters.scaledpoints=askedscaledpoints
+ local isvirtual=properties.virtualized or tfmdata.type=="virtual"
+ local hasquality=target.auto_expand or target.auto_protrude
+ local hasitalics=properties.hasitalics
+ local autoitalicamount=properties.autoitalicamount
+ local stackmath=not properties.nostackmath
+ local nonames=properties.noglyphnames
+ local haskerns=properties.haskerns or properties.mode=="base"
+ local hasligatures=properties.hasligatures or properties.mode=="base"
+ local realdimensions=properties.realdimensions
+ if changed and not next(changed) then
+ changed=false
+ end
+ target.type=isvirtual and "virtual" or "real"
+ target.postprocessors=tfmdata.postprocessors
+ local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt
+ local targetspace=(parameters.space or parameters[2] or 0)*hdelta
+ local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
+ local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
+ local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta
+ local targetquad=(parameters.quad or parameters[6] or 0)*hdelta
+ local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta
+ targetparameters.slant=targetslant
+ targetparameters.space=targetspace
+ targetparameters.space_stretch=targetspace_stretch
+ targetparameters.space_shrink=targetspace_shrink
+ targetparameters.x_height=targetx_height
+ targetparameters.quad=targetquad
+ targetparameters.extra_space=targetextra_space
+ local ascender=parameters.ascender
+ if ascender then
+ targetparameters.ascender=delta*ascender
+ end
+ local descender=parameters.descender
+ if descender then
+ targetparameters.descender=delta*descender
+ end
+ constructors.enhanceparameters(targetparameters)
+ local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0
+ local scaledwidth=defaultwidth*hdelta
+ local scaledheight=defaultheight*vdelta
+ local scaleddepth=defaultdepth*vdelta
+ local hasmath=(properties.hasmath or next(mathparameters)) and true
+ if hasmath then
+ constructors.assignmathparameters(target,tfmdata)
+ properties.hasmath=true
+ target.nomath=false
+ target.MathConstants=target.mathparameters
+ else
+ properties.hasmath=false
+ target.nomath=true
+ target.mathparameters=nil
+ end
+ if hasmath then
+ local mathitalics=properties.mathitalics
+ if mathitalics==false then
+ if trace_defining then
+ report_defining("%s italics %s for font %a, fullname %a, filename %a","math",hasitalics and "ignored" or "disabled",name,fullname,filename)
+ end
+ hasitalics=false
+ autoitalicamount=false
+ end
+ else
+ local textitalics=properties.textitalics
+ if textitalics==false then
+ if trace_defining then
+ report_defining("%s italics %s for font %a, fullname %a, filename %a","text",hasitalics and "ignored" or "disabled",name,fullname,filename)
+ end
+ hasitalics=false
+ autoitalicamount=false
+ end
+ end
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,hasmath and "enabled" or "disabled",hasitalics and "enabled" or "disabled")
+ end
+ constructors.beforecopyingcharacters(target,tfmdata)
+ local sharedkerns={}
+ for unicode,character in next,characters do
+ local chr,description,index
+ if changed then
+ local c=changed[unicode]
+ if c then
+ description=descriptions[c] or descriptions[unicode] or character
+ character=characters[c] or character
+ index=description.index or c
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ end
+ local width=description.width
+ local height=description.height
+ local depth=description.depth
+ if realdimensions then
+ if not height or height==0 then
+ local bb=description.boundingbox
+ local ht=bb[4]
+ if ht~=0 then
+ height=ht
+ end
+ if not depth or depth==0 then
+ local dp=-bb[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ elseif not depth or depth==0 then
+ local dp=-description.boundingbox[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ end
+ if width then width=hdelta*width else width=scaledwidth end
+ if height then height=vdelta*height else height=scaledheight end
+ if depth and depth~=0 then
+ depth=delta*depth
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ end
+ else
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ width=width,
+ }
+ end
+ end
+ local isunicode=description.unicode
+ if isunicode then
+ chr.unicode=isunicode
+ chr.tounicode=tounicode(isunicode)
+ end
+ if hasquality then
+ local ve=character.expansion_factor
+ if ve then
+ chr.expansion_factor=ve*1000
+ end
+ local vl=character.left_protruding
+ if vl then
+ chr.left_protruding=protrusionfactor*width*vl
+ end
+ local vr=character.right_protruding
+ if vr then
+ chr.right_protruding=protrusionfactor*width*vr
+ end
+ end
+ if hasmath then
+ local vn=character.next
+ if vn then
+ chr.next=vn
+ else
+ local vv=character.vert_variants
+ if vv then
+ local t={}
+ for i=1,#vv do
+ local vvi=vv[i]
+ t[i]={
+ ["start"]=(vvi["start"] or 0)*vdelta,
+ ["end"]=(vvi["end"] or 0)*vdelta,
+ ["advance"]=(vvi["advance"] or 0)*vdelta,
+ ["extender"]=vvi["extender"],
+ ["glyph"]=vvi["glyph"],
+ }
+ end
+ chr.vert_variants=t
+ else
+ local hv=character.horiz_variants
+ if hv then
+ local t={}
+ for i=1,#hv do
+ local hvi=hv[i]
+ t[i]={
+ ["start"]=(hvi["start"] or 0)*hdelta,
+ ["end"]=(hvi["end"] or 0)*hdelta,
+ ["advance"]=(hvi["advance"] or 0)*hdelta,
+ ["extender"]=hvi["extender"],
+ ["glyph"]=hvi["glyph"],
+ }
+ end
+ chr.horiz_variants=t
+ end
+ end
+ end
+ local vi=character.vert_italic
+ if vi and vi~=0 then
+ chr.vert_italic=vi*hdelta
+ end
+ local va=character.accent
+ if va then
+ chr.top_accent=vdelta*va
+ end
+ if stackmath then
+ local mk=character.mathkerns
+ if mk then
+ local kerns={}
+ local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_right=k end
+ local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_left=k end
+ local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_left=k end
+ local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_right=k end
+ chr.mathkern=kerns
+ end
+ end
+ if hasitalics then
+ local vi=character.italic
+ if vi and vi~=0 then
+ chr.italic=vi*hdelta
+ end
+ end
+ elseif autoitalicamount then
+ local vi=description.italic
+ if not vi then
+ local vi=description.boundingbox[3]-description.width+autoitalicamount
+ if vi>0 then
+ chr.italic=vi*hdelta
+ end
+ elseif vi~=0 then
+ chr.italic=vi*hdelta
+ end
+ elseif hasitalics then
+ local vi=character.italic
+ if vi and vi~=0 then
+ chr.italic=vi*hdelta
+ end
+ end
+ if haskerns then
+ local vk=character.kerns
+ if vk then
+ local s=sharedkerns[vk]
+ if not s then
+ s={}
+ for k,v in next,vk do s[k]=v*hdelta end
+ sharedkerns[vk]=s
+ end
+ chr.kerns=s
+ end
+ end
+ if hasligatures then
+ local vl=character.ligatures
+ if vl then
+ if true then
+ chr.ligatures=vl
+ else
+ local tt={}
+ for i,l in next,vl do
+ tt[i]=l
+ end
+ chr.ligatures=tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc=character.commands
+ if vc then
+ local ok=false
+ for i=1,#vc do
+ local key=vc[i][1]
+ if key=="right" or key=="down" then
+ ok=true
+ break
+ end
+ end
+ if ok then
+ local tt={}
+ for i=1,#vc do
+ local ivc=vc[i]
+ local key=ivc[1]
+ if key=="right" then
+ tt[i]={ key,ivc[2]*hdelta }
+ elseif key=="down" then
+ tt[i]={ key,ivc[2]*vdelta }
+ elseif key=="rule" then
+ tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta }
+ else
+ tt[i]=ivc
+ end
+ end
+ chr.commands=tt
+ else
+ chr.commands=vc
+ end
+ chr.index=nil
+ end
+ end
+ targetcharacters[unicode]=chr
+ end
+ properties.setitalics=hasitalics
+ constructors.aftercopyingcharacters(target,tfmdata)
+ constructors.trytosharefont(target,tfmdata)
+ return target
+end
+function constructors.finalize(tfmdata)
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ if not tfmdata.characters then
+ return nil
+ end
+ if not tfmdata.goodies then
+ tfmdata.goodies={}
+ end
+ local parameters=tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ if not parameters.expansion then
+ parameters.expansion={
+ stretch=tfmdata.stretch or 0,
+ shrink=tfmdata.shrink or 0,
+ step=tfmdata.step or 0,
+ auto=tfmdata.auto_expand or false,
+ }
+ end
+ if not parameters.protrusion then
+ parameters.protrusion={
+ auto=tfmdata.auto_protrude or false
+ }
+ end
+ if not parameters.size then
+ parameters.size=tfmdata.size
+ end
+ if not parameters.extendfactor then
+ parameters.extendfactor=tfmdata.extend or 0
+ end
+ if not parameters.slantfactor then
+ parameters.slantfactor=tfmdata.slant or 0
+ end
+ local designsize=parameters.designsize
+ if designsize then
+ parameters.minsize=tfmdata.minsize or designsize
+ parameters.maxsize=tfmdata.maxsize or designsize
+ else
+ designsize=factors.pt*10
+ parameters.designsize=designsize
+ parameters.minsize=designsize
+ parameters.maxsize=designsize
+ end
+ parameters.minsize=tfmdata.minsize or parameters.designsize
+ parameters.maxsize=tfmdata.maxsize or parameters.designsize
+ if not parameters.units then
+ parameters.units=tfmdata.units or tfmdata.units_per_em or 1000
+ end
+ if not tfmdata.descriptions then
+ local descriptions={}
+ setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end)
+ tfmdata.descriptions=descriptions
+ end
+ local properties=tfmdata.properties
+ if not properties then
+ properties={}
+ tfmdata.properties=properties
+ end
+ if not properties.virtualized then
+ properties.virtualized=tfmdata.type=="virtual"
+ end
+ if not tfmdata.properties then
+ tfmdata.properties={
+ fontname=tfmdata.fontname,
+ filename=tfmdata.filename,
+ fullname=tfmdata.fullname,
+ name=tfmdata.name,
+ psname=tfmdata.psname,
+ encodingbytes=tfmdata.encodingbytes or 1,
+ embedding=tfmdata.embedding or "subset",
+ tounicode=tfmdata.tounicode or 1,
+ cidinfo=tfmdata.cidinfo or nil,
+ format=tfmdata.format or "type1",
+ direction=tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources={}
+ end
+ if not tfmdata.shared then
+ tfmdata.shared={}
+ end
+ if not properties.hasmath then
+ properties.hasmath=not tfmdata.nomath
+ end
+ tfmdata.MathConstants=nil
+ tfmdata.postprocessors=nil
+ tfmdata.fontname=nil
+ tfmdata.filename=nil
+ tfmdata.fullname=nil
+ tfmdata.name=nil
+ tfmdata.psname=nil
+ tfmdata.encodingbytes=nil
+ tfmdata.embedding=nil
+ tfmdata.tounicode=nil
+ tfmdata.cidinfo=nil
+ tfmdata.format=nil
+ tfmdata.direction=nil
+ tfmdata.type=nil
+ tfmdata.nomath=nil
+ tfmdata.designsize=nil
+ tfmdata.size=nil
+ tfmdata.stretch=nil
+ tfmdata.shrink=nil
+ tfmdata.step=nil
+ tfmdata.auto_expand=nil
+ tfmdata.auto_protrude=nil
+ tfmdata.extend=nil
+ tfmdata.slant=nil
+ tfmdata.units=nil
+ tfmdata.units_per_em=nil
+ tfmdata.cache=nil
+ properties.finalized=true
+ return tfmdata
+end
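+-- [editorial note] constructors.finalize normalizes a tfmdata table in place: it
+-- fills default expansion/protrusion/size parameters, guarantees that the
+-- descriptions, properties, resources and shared subtables exist, and wipes the
+-- transient top-level fields (fontname, size, stretch, ...) before marking the
+-- font as finalized. A minimal calling sketch (hypothetical loader name):
+--
+--   local tfmdata = someloader(name)          -- raw table with characters etc.
+--   tfmdata = constructors.finalize(tfmdata)  -- nil when essential tables are missing
+--   -- afterwards tfmdata.properties.finalized is true and repeated calls are no-ops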
+local hashmethods={}
+constructors.hashmethods=hashmethods
+function constructors.hashfeatures(specification)
+ local features=specification.features
+ if features then
+ local t,tn={},0
+ for category,list in next,features do
+ if next(list) then
+ local hasher=hashmethods[category]
+ if hasher then
+ local hash=hasher(list)
+ if hash then
+ tn=tn+1
+ t[tn]=category..":"..hash
+ end
+ end
+ end
+ end
+ if tn>0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+hashmethods.normal=function(list)
+ local s={}
+ local n=0
+ for k,v in next,list do
+ if not k then
+ elseif k=="number" or k=="features" then
+ else
+ n=n+1
+ s[n]=k
+ end
+ end
+ if n>0 then
+ sort(s)
+ for i=1,n do
+ local k=s[i]
+ s[i]=k..'='..tostring(list[k])
+ end
+ return concat(s,"+")
+ end
+end
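+-- [editorial note] Example of the hashing above (illustrative values): for
+-- specification.features={ normal={ liga=true, kern=true } } the "normal" hasher
+-- sorts the keys and yields "kern=true+liga=true", so
+-- constructors.hashfeatures(specification) returns "normal:kern=true+liga=true";
+-- when nothing usable is present it returns the string "unknown".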
+function constructors.hashinstance(specification,force)
+ local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks
+ if force or not hash then
+ hash=constructors.hashfeatures(specification)
+ specification.hash=hash
+ end
+ if size<1000 and designsizes[hash] then
+ size=math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size=size
+ end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
+ end
+end
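+-- [editorial note] constructors.hashinstance appends the (possibly rescaled)
+-- size and optional fallbacks to that feature hash, e.g.
+-- "normal:kern=true+liga=true @ 655360" for a 10pt instance (10*65536 scaled
+-- points); sizes below 1000 are taken to be relative to a registered design
+-- size and are converted via constructors.scaled.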
+function constructors.setname(tfmdata,specification)
+ if constructors.namemode=="specification" then
+ local specname=specification.specification
+ if specname then
+ tfmdata.properties.name=specname
+ if trace_defining then
+ report_otf("overloaded fontname %a",specname)
+ end
+ end
+ end
+end
+function constructors.checkedfilename(data)
+ local foundfilename=data.foundfilename
+ if not foundfilename then
+ local askedfilename=data.filename or ""
+ if askedfilename~="" then
+ askedfilename=resolvers.resolve(askedfilename)
+ foundfilename=resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename=="" then
+ report_defining("source file %a is not found",askedfilename)
+ foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename~="" then
+ report_defining("using source file %a due to cache mismatch",foundfilename)
+ end
+ end
+ end
+ data.foundfilename=foundfilename
+ end
+ return foundfilename
+end
+local formats=allocate()
+fonts.formats=formats
+setmetatableindex(formats,function(t,k)
+ local l=lower(k)
+ if rawget(t,k) then
+ t[k]=l
+ return l
+ end
+ return rawget(t,file.suffix(l))
+end)
+local locations={}
+local function setindeed(mode,target,group,name,action,position)
+ local t=target[mode]
+ if not t then
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
+ os.exit()
+ elseif position then
+ insert(t,position,{ name=name,action=action })
+ else
+ for i=1,#t do
+ local ti=t[i]
+ if ti.name==name then
+ ti.action=action
+ return
+ end
+ end
+ insert(t,{ name=name,action=action })
+ end
+end
+local function set(group,name,target,source)
+ target=target[group]
+ if not target then
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local source=source[group]
+ if not source then
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local node=source.node
+ local base=source.base
+ local position=source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+local function register(where,specification)
+ local name=specification.name
+ if name and name~="" then
+ local default=specification.default
+ local description=specification.description
+ local initializers=specification.initializers
+ local processors=specification.processors
+ local manipulators=specification.manipulators
+ local modechecker=specification.modechecker
+ if default then
+ where.defaults[name]=default
+ end
+ if description and description~="" then
+ where.descriptions[name]=description
+ end
+ if initializers then
+ set('initializers',name,where,specification)
+ end
+ if processors then
+ set('processors',name,where,specification)
+ end
+ if manipulators then
+ set('manipulators',name,where,specification)
+ end
+ if modechecker then
+ where.modechecker=modechecker
+ end
+ end
+end
+constructors.registerfeature=register
+function constructors.getfeatureaction(what,where,mode,name)
+ what=handlers[what].features
+ if what then
+ where=what[where]
+ if where then
+ mode=where[mode]
+ if mode then
+ for i=1,#mode do
+ local m=mode[i]
+ if m.name==name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.newhandler(what)
+ local handler=handlers[what]
+ if not handler then
+ handler={}
+ handlers[what]=handler
+ end
+ return handler
+end
+function constructors.newfeatures(what)
+ local handler=handlers[what]
+ local features=handler.features
+ if not features then
+ local tables=handler.tables
+ local statistics=handler.statistics
+ features=allocate {
+ defaults={},
+ descriptions=tables and tables.features or {},
+ used=statistics and statistics.usedfeatures or {},
+ initializers={ base={},node={} },
+ processors={ base={},node={} },
+ manipulators={ base={},node={} },
+ }
+ features.register=function(specification) return register(features,specification) end
+ handler.features=features
+ end
+ return features
+end
+function constructors.checkedfeatures(what,features)
+ local defaults=handlers[what].features.defaults
+ if features and next(features) then
+ features=fastcopy(features)
+ for key,value in next,defaults do
+ if features[key]==nil then
+ features[key]=value
+ end
+ end
+ return features
+ else
+ return fastcopy(defaults)
+ end
+end
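+-- [editorial note] checkedfeatures merges the registered defaults into a user
+-- feature table without overriding explicit settings. Sketch with a
+-- hypothetical handler and defaults: if the "otf" defaults were { mode="node" },
+-- then constructors.checkedfeatures("otf",{ liga=false }) would return
+-- { liga=false, mode="node" }, and a nil or empty table would return a copy of
+-- the defaults.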
+function constructors.initializefeatures(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties or {}
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatinitializers=whatfeatures.initializers
+ local whatmodechecker=whatfeatures.modechecker
+ local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode=mode
+ features.mode=mode
+ local done={}
+ while true do
+ local redo=false
+ local initializers=whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step=initializers[i]
+ local feature=step.name
+ local value=features[feature]
+ if not value then
+ elseif done[feature] then
+ else
+ local action=step.action
+ if trace then
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
+ end
+ action(tfmdata,value,features)
+ if mode~=properties.mode or mode~=features.mode then
+ if whatmodechecker then
+ properties.mode=whatmodechecker(tfmdata,features,properties.mode)
+ features.mode=properties.mode
+ end
+ if mode~=properties.mode then
+ mode=properties.mode
+ redo=true
+ end
+ end
+ done[feature]=true
+ end
+ if redo then
+ break
+ end
+ end
+ if not redo then
+ break
+ end
+ else
+ break
+ end
+ end
+ properties.mode=mode
+ return true
+ else
+ return false
+ end
+end
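+-- [editorial note] initializefeatures runs the registered initializers for the
+-- current mode; an initializer may switch properties.mode (for instance from
+-- "base" to "node"), in which case the redo loop above restarts so the
+-- remaining features are initialized for the new mode as well.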
+function constructors.collectprocessors(what,tfmdata,features,trace,report)
+ local processes,nofprocesses={},0
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatprocessors=whatfeatures.processors
+ local mode=properties.mode
+ local processors=whatprocessors[mode]
+ if processors then
+ for i=1,#processors do
+ local step=processors[i]
+ local feature=step.name
+ if features[feature] then
+ local action=step.action
+ if trace then
+ report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ nofprocesses=nofprocesses+1
+ processes[nofprocesses]=action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %a for font %a",mode,properties.fullname)
+ end
+ end
+ return processes
+end
+function constructors.applymanipulators(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatmanipulators=whatfeatures.manipulators
+ local mode=properties.mode
+ local manipulators=whatmanipulators[mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step=manipulators[i]
+ local feature=step.name
+ local value=features[feature]
+ if value then
+ local action=step.action
+ if trace then
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname)
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.addcoreunicodes(unicodes)
+ if not unicodes then
+ unicodes={}
+ end
+ unicodes.space=0x0020
+ unicodes.hyphen=0x002D
+ unicodes.zwj=0x200D
+ unicodes.zwnj=0x200C
+ return unicodes
+end
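+-- [editorial note] addcoreunicodes only guarantees that the four glyph names the
+-- loader relies on elsewhere resolve to their code points, e.g.
+-- constructors.addcoreunicodes({ }).space == 0x0020 and .zwnj == 0x200C.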
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-enc']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.encodings={}
+fonts.encodings.agl={}
+fonts.encodings.known={}
+setmetatable(fonts.encodings.agl,{ __index=function(t,k)
+ if k=="unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes=dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl={ unicodes=unicodes }
+ return unicodes
+ else
+ return nil
+ end
+end })
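+-- [editorial note] The agl table is filled lazily: the first access to
+-- fonts.encodings.agl.unicodes loads the (extended) Adobe Glyph List from
+-- font-age.lua via the resolver and replaces this metatable-driven stub with a
+-- plain { unicodes = ... } table, so the file is read at most once per run.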
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-cid']={
+ version=1.001,
+ comment="companion to font-otf.lua (cidmaps)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,match,lower=string.format,string.match,string.lower
+local tonumber=tonumber
+local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match
+local fonts,logs,trackers=fonts,logs,trackers
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local cid={}
+fonts.cid=cid
+local cidmap={}
+local cidmax=10
+local number=C(R("09","af","AF")^1)
+local space=S(" \n\r\t")
+local spaces=space^0
+local period=P(".")
+local periods=period*period
+local name=P("/")*C((1-space)^1)
+local unicodes,names={},{}
+local function do_one(a,b)
+ unicodes[tonumber(a)]=tonumber(b,16)
+end
+local function do_range(a,b,c)
+ c=tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i]=c
+ c=c+1
+ end
+end
+local function do_name(a,b)
+ names[tonumber(a)]=b
+end
+local grammar=P { "start",
+ start=number*spaces*number*V("series"),
+ series=(spaces*(V("one")+V("range")+V("named")))^1,
+ one=(number*spaces*number)/do_one,
+ range=(number*periods*number*spaces*number)/do_range,
+ named=(number*spaces*name)/do_name
+}
+local function loadcidfile(filename)
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+ local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names,
+ }
+ end
+end
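+-- [editorial note] Sketch of the cidmap syntax accepted by the grammar above
+-- (illustrative, not a real file): after a two-number header each entry is
+-- either "<cid> <hexunicode>" for a single mapping, "<cid>..<cid> <hexunicode>"
+-- for a consecutive range, or "<cid> /<glyphname>" for a named entry; the
+-- results end up in the unicodes and names tables returned by loadcidfile.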
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
+local function locate(registry,ordering,supplement)
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
+ if trace_loading then
+ report_otf("using cidmap file %a",filename)
+ end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
+ end
+ end
+ return found
+end
+function cid.getmap(specification)
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+ local filename=format(template,registry,ordering,supplement)
+ local lowername=lower(filename)
+ local found=cidmap[lowername]
+ if found then
+ return found
+ end
+ if ordering=="Identity" then
+ local found={
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes={},
+ names={},
+ }
+ cidmap[lowername]=found
+ return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
+end
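+-- [editorial note] cid.getmap first tries the requested supplement, then falls
+-- back to higher supplements (up to cidmax) and finally to lower ones, caching
+-- the result under the lower supplement names so later lookups hit directly;
+-- an "Identity" ordering gets an empty map without touching the filesystem.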
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tonumber,next,type=tonumber,next,type
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local formatters=string.formatters
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_mapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts or {}
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local allocate=utilities.storage.allocate
+local hex=R("AF","09")
+local hexfour=(hex*hex*hex^-2)/function(s) return tonumber(s,16) end
+local hexsix=(hex*hex*hex^-4)/function(s) return tonumber(s,16) end
+local dec=(R("09")^1)/tonumber
+local period=P(".")
+local unicode=(P("uni")+P("UNI"))*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true))
+local ucode=(P("u")+P("U") )*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true))
+local index=P("index")*dec*Cc(false)
+local parser=unicode+ucode+index
+local parsers={}
+local function makenameparser(str)
+ if not str or str=="" then
+ return parser
+ else
+ local p=parsers[str]
+ if not p then
+ p=P(str)*period*dec*Cc(false)
+ parsers[str]=p
+ end
+ return p
+ end
+end
+local f_single=formatters["%04X"]
+local f_double=formatters["%04X%04X"]
+local function tounicode16(unicode,name)
+ if unicode<0x10000 then
+ return f_single(unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+end
+local function tounicode16sequence(unicodes,name)
+ local t={}
+ for l=1,#unicodes do
+ local u=unicodes[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
+ elseif u<0x1FFFFFFFFF then
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+end
+local function tounicode(unicode,name)
+ if type(unicode)=="table" then
+ local t={}
+ for l=1,#unicode do
+ local u=unicode[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
+ elseif u<0x1FFFFFFFFF then
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+ else
+ if unicode<0x10000 then
+ return f_single(unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+end
+local function fromunicode16(str)
+ if #str==4 then
+ return tonumber(str,16)
+ else
+ local l,r=match(str,"(....)(....)")
+ return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00
+ end
+end
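+-- [editorial note] A small illustration of the helpers above:
+-- mappings.tounicode16(0x0041) yields "0041" and mappings.fromunicode16("0041")
+-- gives back 0x41; glyphs that map to several code points are concatenated,
+-- e.g. mappings.tounicode({ 0x0066,0x0069 }) yields "00660069" for an fi
+-- ligature.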
+mappings.makenameparser=makenameparser
+mappings.tounicode=tounicode
+mappings.tounicode16=tounicode16
+mappings.tounicode16sequence=tounicode16sequence
+mappings.fromunicode16=fromunicode16
+local ligseparator=P("_")
+local varseparator=P(".")
+local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0)
+local overloads=allocate {
+ IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 },
+ ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 },
+ ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 },
+ fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 },
+ fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 },
+ ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 },
+ ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 },
+ fj={ name="f_j",unicode={ 0x66,0x6A } },
+ fk={ name="f_k",unicode={ 0x66,0x6B } },
+}
+for k,v in next,overloads do
+ local name=v.name
+ local mess=v.mess
+ if name then
+ overloads[name]=v
+ end
+ if mess then
+ overloads[mess]=v
+ end
+end
+mappings.overloads=overloads
+function mappings.addtounicode(data,filename,checklookups)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ if not unicodes then
+ return
+ end
+ local properties=data.properties
+ local descriptions=data.descriptions
+ unicodes['space']=unicodes['space'] or 32
+ unicodes['hyphen']=unicodes['hyphen'] or 45
+ unicodes['zwj']=unicodes['zwj'] or 0x200D
+ unicodes['zwnj']=unicodes['zwnj'] or 0x200C
+ local private=fonts.constructors and fonts.constructors.privateoffset or 0xF0000
+ local unicodevector=fonts.encodings.agl.unicodes or {}
+ local contextvector=fonts.encodings.agl.ctxcodes or {}
+ local missing={}
+ local nofmissing=0
+ local oparser=nil
+ local cidnames=nil
+ local cidcodes=nil
+ local cidinfo=properties.cidinfo
+ local usedmap=cidinfo and fonts.cid.getmap(cidinfo)
+ local uparser=makenameparser()
+ if usedmap then
+ oparser=usedmap and makenameparser(cidinfo.ordering)
+ cidnames=usedmap.names
+ cidcodes=usedmap.unicodes
+ end
+ local ns=0
+ local nl=0
+ for unic,glyph in next,descriptions do
+ local name=glyph.name
+ if name then
+ local index=glyph.index
+ local r=overloads[name]
+ if r then
+ glyph.unicode=r.unicode
+ elseif not unic or unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local unicode=unicodevector[name] or contextvector[name]
+ if unicode then
+ glyph.unicode=unicode
+ ns=ns+1
+ end
+ if (not unicode) and usedmap then
+ local foundindex=lpegmatch(oparser,name)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ glyph.unicode=unicode
+ ns=ns+1
+ else
+ local reference=cidnames[foundindex]
+ if reference then
+ local foundindex=lpegmatch(oparser,reference)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ glyph.unicode=unicode
+ ns=ns+1
+ end
+ end
+ if not unicode or unicode=="" then
+ local foundcodes,multiple=lpegmatch(uparser,reference)
+ if foundcodes then
+ glyph.unicode=foundcodes
+ if multiple then
+ nl=nl+1
+ unicode=true
+ else
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if not unicode or unicode=="" then
+ local split=lpegmatch(namesplitter,name)
+ local nsplit=split and #split or 0
+ if nsplit==0 then
+ elseif nsplit==1 then
+ local base=split[1]
+ local u=unicodes[base] or unicodevector[base] or contextvector[base]
+ if not u then
+ elseif type(u)=="table" then
+ if u[1]<private then
+ unicode=u
+ glyph.unicode=unicode
+ end
+ elseif u<private then
+ unicode=u
+ glyph.unicode=unicode
+ end
+ else
+ local t,n={},0
+ for l=1,nsplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base] or contextvector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ if u[1]>=private then
+ break
+ end
+ n=n+1
+ t[n]=u[1]
+ else
+ if u>=private then
+ break
+ end
+ n=n+1
+ t[n]=u
+ end
+ end
+ if n>0 then
+ if n==1 then
+ unicode=t[1]
+ else
+ unicode=t
+ end
+ glyph.unicode=unicode
+ end
+ end
+ nl=nl+1
+ end
+ if not unicode or unicode=="" then
+ local foundcodes,multiple=lpegmatch(uparser,name)
+ if foundcodes then
+ glyph.unicode=foundcodes
+ if multiple then
+ nl=nl+1
+ unicode=true
+ else
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ local r=overloads[unicode]
+ if r then
+ unicode=r.unicode
+ glyph.unicode=unicode
+ end
+ if not unicode then
+ missing[unic]=true
+ nofmissing=nofmissing+1
+ end
+ end
+ else
+ end
+ end
+ if type(checklookups)=="function" then
+ checklookups(data,missing,nofmissing)
+ end
+ if trace_mapping then
+ for unic,glyph in table.sortedhash(descriptions) do
+ local name=glyph.name
+ local index=glyph.index
+ local unicode=glyph.unicode
+ if unicode then
+ if type(unicode)=="table" then
+ local unicodes={}
+ for i=1,#unicode do
+ unicodes[i]=formatters("%U",unicode[i])
+ end
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode)
+ end
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns>0 or nl>0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
+end
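+-- [editorial note] Resolution order in addtounicode: explicit overloads (fi,
+-- ffl, ij, ...) win, then the Adobe Glyph List vectors, then a CID map derived
+-- from the font's cidinfo, then splitting composed glyph names like "f_f_i"
+-- into known parts, and finally parsing "uniXXXX" / "uXXXXXX" style names;
+-- whatever is still unresolved is counted as missing and handed to the
+-- optional checklookups callback.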
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-syn']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.names=fonts.names or {}
+fonts.names.version=1.001
+fonts.names.basename="luatex-fonts-names"
+fonts.names.new_to_old={}
+fonts.names.old_to_new={}
+fonts.names.cache=containers.define("fonts","data",fonts.names.version,true)
+local data,loaded=nil,false
+local fileformats={ "lua","tex","other text files" }
+function fonts.names.reportmissingbase()
+ texio.write("<missing font database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingbase=nil
+end
+function fonts.names.reportmissingname()
+ texio.write("<unknown font in database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingname=nil
+end
+function fonts.names.resolve(name,sub)
+ if not loaded then
+ local basename=fonts.names.basename
+ if basename and basename~="" then
+ data=containers.read(fonts.names.cache,basename)
+ if not data then
+ basename=file.addsuffix(basename,"lua")
+ for i=1,#fileformats do
+ local format=fileformats[i]
+ local foundname=resolvers.findfile(basename,format) or ""
+ if foundname~="" then
+ data=dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ end
+ loaded=true
+ end
+ if type(data)=="table" and data.version==fonts.names.version then
+ local condensed=string.gsub(string.lower(name),"[^%a%d]","")
+ local found=data.mappings and data.mappings[condensed]
+ if found then
+ local fontname,filename,subfont=found[1],found[2],found[3]
+ if subfont then
+ return filename,fontname
+ else
+ return filename,false
+ end
+ elseif fonts.names.reportmissingname then
+ fonts.names.reportmissingname()
+ return name,false
+ end
+ elseif fonts.names.reportmissingbase then
+ fonts.names.reportmissingbase()
+ end
+end
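+-- [editorial note] In this plain-LuaTeX branch the name database is the
+-- "luatex-fonts-names" file generated by mtxrun (see the messages above). A
+-- lookup sketch with hypothetical data: fonts.names.resolve("Latin Modern
+-- Roman") condenses the name to "latinmodernroman", looks that up in
+-- data.mappings and returns the stored filename, plus the font name when a
+-- subfont index is recorded.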
+fonts.names.resolvespec=fonts.names.resolve
+function fonts.names.getfilename(askedname,suffix)
+ return ""
+end
+function fonts.names.ignoredfile(filename)
+ return false
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-tfm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next=next
+local match=string.match
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end)
+local report_defining=logs.reporter("fonts","defining")
+local report_tfm=logs.reporter("fonts","tfm loading")
+local findbinfile=resolvers.findbinfile
+local fonts=fonts
+local handlers=fonts.handlers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local encodings=fonts.encodings
+local tfm=constructors.newhandler("tfm")
+tfm.version=1.000
+tfm.maxnestingdepth=5
+tfm.maxnestingsize=65536*1024
+local tfmfeatures=constructors.newfeatures("tfm")
+local registertfmfeature=tfmfeatures.register
+constructors.resolvevirtualtoo=false
+fonts.formats.tfm="type1"
+function tfm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
+ if okay then
+ return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
+ else
+ return {}
+ end
+end
+local depth={}
+local function read_from_tfm(specification)
+ local filename=specification.filename
+ local size=specification.size
+ depth[filename]=(depth[filename] or 0)+1
+ if trace_defining then
+ report_defining("loading tfm file %a at size %s",filename,size)
+ end
+ local tfmdata=font.read_tfm(filename,size)
+ if tfmdata then
+ local features=specification.features and specification.features.normal or {}
+ local resources=tfmdata.resources or {}
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ local shared=tfmdata.shared or {}
+ properties.name=tfmdata.name
+ properties.fontname=tfmdata.fontname
+ properties.psname=tfmdata.psname
+ properties.filename=specification.filename
+ properties.format=fonts.formats.tfm
+ parameters.size=size
+ tfmdata.properties=properties
+ tfmdata.resources=resources
+ tfmdata.parameters=parameters
+ tfmdata.shared=shared
+ shared.rawdata={}
+ shared.features=features
+ shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
+ parameters.slant=parameters.slant or parameters[1] or 0
+ parameters.space=parameters.space or parameters[2] or 0
+ parameters.space_stretch=parameters.space_stretch or parameters[3] or 0
+ parameters.space_shrink=parameters.space_shrink or parameters[4] or 0
+ parameters.x_height=parameters.x_height or parameters[5] or 0
+ parameters.quad=parameters.quad or parameters[6] or 0
+ parameters.extra_space=parameters.extra_space or parameters[7] or 0
+ constructors.enhanceparameters(parameters)
+ if constructors.resolvevirtualtoo then
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification)
+ local vfname=findbinfile(specification.name,'ovf')
+ if vfname and vfname~="" then
+ local vfdata=font.read_vf(vfname,size)
+ if vfdata then
+ local chars=tfmdata.characters
+ for k,v in next,vfdata.characters do
+ chars[k].commands=v.commands
+ end
+ properties.virtualized=true
+ tfmdata.fonts=vfdata.fonts
+ tfmdata.type="virtual"
+ local fontlist=vfdata.fonts
+ local name=file.nameonly(filename)
+ for i=1,#fontlist do
+ local n=fontlist[i].name
+ local s=fontlist[i].size
+ local d=depth[filename]
+ s=constructors.scaled(s,vfdata.designsize)
+ if d>tfm.maxnestingdepth then
+ report_defining("too deeply nested virtual font %a with size %a, max nesting depth %s",n,s,tfm.maxnestingdepth)
+ fontlist[i]={ id=0 }
+ elseif (d>1) and (s>tfm.maxnestingsize) then
+ report_defining("virtual font %a exceeds size %s",n,s)
+ fontlist[i]={ id=0 }
+ else
+ local t,id=fonts.constructors.readanddefine(n,s)
+ fontlist[i]={ id=id }
+ end
+ end
+ end
+ end
+ end
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
+ if not features.encoding then
+ local encoding,filename=match(properties.filename,"^(.-)%-(.*)$")
+ if filename and encoding and encodings.known and encodings.known[encoding] then
+ features.encoding=encoding
+ end
+ end
+ properties.haskerns=true
+ properties.hasligatures=true
+ resources.unicodes={}
+ resources.lookuptags={}
+ depth[filename]=depth[filename]-1
+ return tfmdata
+ else
+ depth[filename]=depth[filename]-1
+ end
+end
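+-- [editorial note] read_from_tfm wraps font.read_tfm, fills in the seven classic
+-- fontdimen parameters, and, when constructors.resolvevirtualtoo is enabled
+-- (it is switched off above), merges the commands of a matching ovf so the
+-- font becomes virtual; nesting of such virtual fonts is bounded by
+-- tfm.maxnestingdepth and tfm.maxnestingsize.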
+local function check_tfm(specification,fullname)
+ local foundname=findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=findbinfile(fullname,'ofm') or ""
+ end
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"tfm") or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return read_from_tfm(specification)
+ elseif trace_defining then
+ report_defining("loading tfm with name %a fails",specification.name)
+ end
+end
+readers.check_tfm=check_tfm
+function readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ return check_tfm(specification,fullname)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers
+local next,type,tonumber=next,type,tonumber
+local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip
+local abs=math.abs
+local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns
+local derivetable=table.derive
+local trace_features=false trackers.register("afm.features",function(v) trace_features=v end)
+local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end)
+local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end)
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local report_afm=logs.reporter("fonts","afm loading")
+local setmetatableindex=table.setmetatableindex
+local findbinfile=resolvers.findbinfile
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local fontloader=fontloader
+local font_to_table=fontloader.to_table
+local open_font=fontloader.open
+local close_font=fontloader.close
+local afm=constructors.newhandler("afm")
+local pfb=constructors.newhandler("pfb")
+local afmfeatures=constructors.newfeatures("afm")
+local registerafmfeature=afmfeatures.register
+afm.version=1.500
+afm.cache=containers.define("fonts","afm",afm.version,true)
+afm.autoprefixed=true
+afm.helpdata={}
+afm.syncspace=true
+afm.addligatures=true
+afm.addtexligatures=true
+afm.addkerns=true
+local overloads=fonts.mappings.overloads
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+registerafmfeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+local comment=P("Comment")
+local spacing=patterns.spacer
+local lineend=patterns.newline
+local words=C((1-lineend)^1)
+local number=C((R("09")+S("."))^1)/tonumber*spacing^0
+local data=lpeg.Carg(1)
+local pattern=(
+ comment*spacing*(
+ data*(
+ ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end
+ )+(1-lineend)^0
+ )+(1-comment)^1
+)^0
+local function scan_comment(str)
+ local fd={}
+ lpegmatch(pattern,str,1,fd)
+ return fd
+end
+local keys={}
+function keys.FontName (data,line) data.metadata.fontname=strip (line)
+ data.metadata.fullname=strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.monospaced=toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender=tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end
+function keys.Comment (data,line)
+ line=lower(line)
+ local designsize=match(line,"designsize[^%d]*(%d+)")
+ if designsize then data.metadata.designsize=tonumber(designsize) end
+end
+local function get_charmetrics(data,charmetrics,vector)
+ local characters=data.characters
+ local chr,ind={},0
+ for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
+ if k=='C' then
+ v=tonumber(v)
+ if v<0 then
+ ind=ind+1
+ else
+ ind=v
+ end
+ chr={
+ index=ind
+ }
+ elseif k=='WX' then
+ chr.width=tonumber(v)
+ elseif k=='N' then
+ characters[v]=chr
+ elseif k=='B' then
+ local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
+ chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) }
+ elseif k=='L' then
+ local plus,becomes=match(v,"^(.-) +(.-)$")
+ local ligatures=chr.ligatures
+ if ligatures then
+ ligatures[plus]=becomes
+ else
+ chr.ligatures={ [plus]=becomes }
+ end
+ end
+ end
+end
+local function get_kernpairs(data,kernpairs)
+ local characters=data.characters
+ for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
+ local chr=characters[one]
+ if chr then
+ local kerns=chr.kerns
+ if kerns then
+ kerns[two]=tonumber(value)
+ else
+ chr.kerns={ [two]=tonumber(value) }
+ end
+ end
+ end
+end
+local function get_variables(data,fontmetrics)
+ for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
+ local keyhandler=keys[key]
+ if keyhandler then
+ keyhandler(data,rest)
+ end
+ end
+end
+local function get_indexes(data,pfbname)
+ data.resources.filename=resolvers.unresolve(pfbname)
+ local pfbblob=open_font(pfbname)
+ if pfbblob then
+ local characters=data.characters
+ local pfbdata=font_to_table(pfbblob)
+ if pfbdata then
+ local glyphs=pfbdata.glyphs
+ if glyphs then
+ if trace_loading then
+ report_afm("getting index data from %a",pfbname)
+ end
+ for index,glyph in next,glyphs do
+ local name=glyph.name
+ if name then
+ local char=characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %a has index %a",name,index)
+ end
+ char.index=index
+ end
+ end
+ end
+ elseif trace_loading then
+ report_afm("no glyph data in pfb file %a",pfbname)
+ end
+ elseif trace_loading then
+ report_afm("no data in pfb file %a",pfbname)
+ end
+ close_font(pfbblob)
+ elseif trace_loading then
+ report_afm("invalid pfb file %a",pfbname)
+ end
+end
+local function readafm(filename)
+ local ok,afmblob,size=resolvers.loadbinfile(filename)
+ if ok and afmblob then
+ local data={
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=afm.version,
+ creator="context mkiv",
+ },
+ properties={
+ hasitalics=false,
+ },
+ goodies={},
+ metadata={
+ filename=file.removesuffix(file.basename(filename))
+ },
+ characters={
+ },
+ descriptions={
+ },
+ }
+ afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics)
+ if trace_loading then
+ report_afm("loading char metrics")
+ end
+ get_charmetrics(data,charmetrics,vector)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs)
+ if trace_loading then
+ report_afm("loading kern pairs")
+ end
+ get_kernpairs(data,kernpairs)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics)
+ if trace_loading then
+ report_afm("loading variables")
+ end
+ data.afmversion=version
+ get_variables(data,fontmetrics)
+ data.fontdimens=scan_comment(fontmetrics)
+ return ""
+ end)
+ return data
+ else
+ if trace_loading then
+ report_afm("no valid afm file %a",filename)
+ end
+ return nil
+ end
+end
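+-- [editorial note] readafm splits the afm blob into its sections:
+-- StartCharMetrics/EndCharMetrics feeds get_charmetrics (widths, bounding
+-- boxes, ligatures), StartKernPairs/EndKernPairs feeds get_kernpairs, and the
+-- remaining StartFontMetrics body provides the global variables plus the TeX
+-- fontdimen values scanned from its Comment lines.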
+local addkerns,addligatures,addtexligatures,unify,normalize,fixnames
+function afm.load(filename)
+ filename=resolvers.findfile(filename,'afm') or ""
+ if filename~="" and not fonts.names.ignoredfile(filename) then
+ local name=file.removesuffix(file.basename(filename))
+ local data=containers.read(afm.cache,name)
+ local attr=lfs.attributes(filename)
+ local size,time=attr.size or 0,attr.modification or 0
+ local pfbfile=file.replacesuffix(name,"pfb")
+ local pfbname=resolvers.findfile(pfbfile,"pfb") or ""
+ if pfbname=="" then
+ pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize,pfbtime=0,0
+ if pfbname~="" then
+ local attr=lfs.attributes(pfbname)
+ pfbsize=attr.size or 0
+ pfbtime=attr.modification or 0
+ end
+ if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then
+ report_afm("reading %a",filename)
+ data=readafm(filename)
+ if data then
+ if pfbname~="" then
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %a",filename)
+ end
+ report_afm("unifying %a",filename)
+ unify(data,filename)
+ if afm.addligatures then
+ report_afm("add ligatures")
+ addligatures(data)
+ end
+ if afm.addtexligatures then
+ report_afm("add tex ligatures")
+ addtexligatures(data)
+ end
+ if afm.addkerns then
+ report_afm("add extra kerns")
+ addkerns(data)
+ end
+ normalize(data)
+ fixnames(data)
+ report_afm("add tounicode data")
+ fonts.mappings.addtounicode(data,filename)
+ data.size=size
+ data.time=time
+ data.pfbsize=pfbsize
+ data.pfbtime=pfbtime
+ report_afm("saving %a in cache",name)
+ data.resources.unicodes=nil
+ data=containers.write(afm.cache,name,data)
+ data=containers.read(afm.cache,name)
+ end
+ if applyruntimefixes and data then
+ applyruntimefixes(filename,data)
+ end
+ end
+ return data
+ else
+ return nil
+ end
+end
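+-- [editorial note] afm.load caches the parsed data in the "afm" container and
+-- invalidates that cache whenever the size or modification time of either the
+-- afm file or its companion pfb changes; the pfb is only consulted to recover
+-- glyph indices (get_indexes) before ligatures, extra kerns and tounicode
+-- data are added and the result is written back to the cache.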
+local uparser=fonts.mappings.makenameparser()
+unify=function(data,filename)
+ local unicodevector=fonts.encodings.agl.unicodes
+ local unicodes,names={},{}
+ local private=constructors.privateoffset
+ local descriptions=data.descriptions
+ for name,blob in next,data.characters do
+ local code=unicodevector[name]
+ if not code then
+ code=lpegmatch(uparser,name)
+ if not code then
+ code=private
+ private=private+1
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
+ end
+ end
+ local index=blob.index
+ unicodes[name]=code
+ names[name]=index
+ blob.name=name
+ descriptions[code]={
+ boundingbox=blob.boundingbox,
+ width=blob.width,
+ kerns=blob.kerns,
+ index=index,
+ name=name,
+ }
+ end
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local krn={}
+ for name,kern in next,kerns do
+ local unicode=unicodes[name]
+ if unicode then
+ krn[unicode]=kern
+ else
+ end
+ end
+ description.kerns=krn
+ end
+ end
+ data.characters=nil
+ local resources=data.resources
+ local filename=resources.filename or file.removesuffix(file.basename(filename))
+ resources.filename=resolvers.unresolve(filename)
+ resources.unicodes=unicodes
+ resources.marks={}
+ resources.private=private
+end
+normalize=function(data)
+end
+fixnames=function(data)
+ for k,v in next,data.descriptions do
+ local n=v.name
+ local r=overloads[n]
+ if r then
+ local name=r.name
+ if trace_indexing then
+ report_afm("renaming characters %a to %a",n,name)
+ end
+ v.name=name
+ v.unicode=r.unicode
+ end
+ end
+end
+local addthem=function(rawdata,ligatures)
+ if ligatures then
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ for ligname,ligdata in next,ligatures do
+ local one=descriptions[unicodes[ligname]]
+ if one then
+ for _,pair in next,ligdata do
+ local two,three=unicodes[pair[1]],unicodes[pair[2]]
+ if two and three then
+ local ol=one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two]=three
+ end
+ else
+ one.ligatures={ [two]=three }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
+addkerns=function(rawdata)
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local function do_it_left(what)
+ if what then
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local extrakerns
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local ks=kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex]=ks
+ else
+ extrakerns={ [complex]=ks }
+ end
+ end
+ end
+ end
+ if extrakerns then
+ description.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ local function do_it_copy(what)
+ if what then
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local complexdescription=descriptions[complex]
+ if complexdescription then
+ local simpledescription=descriptions[simple]
+ if simpledescription then
+ local extrakerns
+ local kerns=simpledescription.kerns
+ if kerns then
+ for unicode,kern in next,kerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ local simpleextrakerns=simpledescription.extrakerns
+ if simpleextrakerns then
+ for unicode,kern in next,simpleextrakerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ if extrakerns then
+ complexdescription.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
+end
+local function adddimensions(data)
+ if data then
+ for unicode,description in next,data.descriptions do
+ local bb=description.boundingbox
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ description.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ description.depth=dp
+ end
+ end
+ end
+ end
+end
+local function copytotfm(data)
+ if data and data.descriptions then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local unicodes=resources.unicodes
+ for unicode,description in next,data.descriptions do
+ characters[unicode]={}
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname or metadata.fullname
+ local fullname=metadata.fullname or metadata.fontname
+ local endash=0x0020
+ local emdash=0x2014
+ local spacer="space"
+ local spaceunits=500
+ local monospaced=metadata.monospaced
+ local charwidth=metadata.charwidth
+ local italicangle=metadata.italicangle
+ local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ if properties.monospaced then
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits)
+ if spaceunits<200 then
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=500
+ parameters.space_shrink=333
+ parameters.x_height=400
+ parameters.quad=1000
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif afm.syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x0078
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ local fd=data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then
+ for k,v in next,fd do
+ parameters[k]=v
+ end
+ end
+ parameters.designsize=(metadata.designsize or 10)*65536
+ parameters.ascender=abs(metadata.ascender or 0)
+ parameters.descender=abs(metadata.descender or 0)
+ parameters.units=1000
+ properties.spacer=spacer
+ properties.encodingbytes=2
+ properties.format=fonts.formats[filename] or "type1"
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fullname
+ properties.name=filename or fullname or fontname
+ if next(characters) then
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+ end
+ return nil
+end
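+-- [editorial note] copytotfm turns the cached afm data into a tfm-like table:
+-- the space parameters are derived from the space glyph (or charwidth for
+-- monospaced fonts), slant comes from the italic angle, and the fontdimen
+-- values scanned from the afm comments are copied into the numeric parameters
+-- when the math-related set (NUM/DENOM/...) is present.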
+function afm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
+ if okay then
+ return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
+ else
+ return {}
+ end
+end
+local function addtables(data)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
+end
+local function afmtotfm(specification)
+ local afmname=specification.filename or specification.name
+ if specification.forced=="afm" or specification.format=="afm" then
+ if trace_loading then
+ report_afm("forcing afm format for %a",afmname)
+ end
+ else
+ local tfmname=findbinfile(afmname,"ofm") or ""
+ if tfmname~="" then
+ if trace_loading then
+ report_afm("fallback from afm to tfm for %a",afmname)
+ end
+ return
+ end
+ end
+ if afmname~="" then
+ local features=constructors.checkedfeatures("afm",specification.features.normal)
+ specification.features.normal=features
+ constructors.hashinstance(specification,true)
+ specification=definers.resolve(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local rawdata=afm.load(afmname)
+ if rawdata and next(rawdata) then
+ addtables(rawdata)
+ adddimensions(rawdata)
+ tfmdata=copytotfm(rawdata)
+ if tfmdata and next(tfmdata) then
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.features=features
+ shared.processes=afm.setfeatures(tfmdata,features)
+ end
+ elseif trace_loading then
+ report_afm("no (valid) afm file found with name %a",afmname)
+ end
+ tfmdata=containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+ end
+end
+local function read_from_afm(specification)
+ local tfmdata=afmtotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+ fonts.loggers.register(tfmdata,'afm',specification)
+ end
+ return tfmdata
+end
+local function prepareligatures(tfmdata,ligatures,value)
+ if value then
+ local descriptions=tfmdata.descriptions
+ local hasligatures=false
+ for unicode,character in next,tfmdata.characters do
+ local description=descriptions[unicode]
+ local dligatures=description.ligatures
+ if dligatures then
+ local cligatures=character.ligatures
+ if not cligatures then
+ cligatures={}
+ character.ligatures=cligatures
+ end
+ for unicode,ligature in next,dligatures do
+ cligatures[unicode]={
+ char=ligature,
+ type=0
+ }
+ end
+ hasligatures=true
+ end
+ end
+ tfmdata.properties.hasligatures=hasligatures
+ end
+end
+local function preparekerns(tfmdata,kerns,value)
+ if value then
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local descriptions=tfmdata.descriptions
+ local haskerns=false
+ for u,chr in next,tfmdata.characters do
+ local d=descriptions[u]
+ local newkerns=d[kerns]
+ if newkerns then
+ local kerns=chr.kerns
+ if not kerns then
+ kerns={}
+ chr.kerns=kerns
+ end
+ for k,v in next,newkerns do
+ local uk=unicodes[k]
+ if uk then
+ kerns[uk]=v
+ end
+ end
+ haskerns=true
+ end
+ end
+ tfmdata.properties.haskerns=haskerns
+ end
+end
+local list={
+ [0x0027]=0x2019,
+}
+local function texreplacements(tfmdata,value)
+ local descriptions=tfmdata.descriptions
+ local characters=tfmdata.characters
+ for k,v in next,list do
+ characters [k]=characters [v]
+ descriptions[k]=descriptions[v]
+ end
+end
+local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end
+local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
+local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end
+local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end
+registerafmfeature {
+ name="liga",
+ description="traditional ligatures",
+ initializers={
+ base=ligatures,
+ node=ligatures,
+ }
+}
+registerafmfeature {
+ name="kern",
+ description="intercharacter kerning",
+ initializers={
+ base=kerns,
+ node=kerns,
+ }
+}
+registerafmfeature {
+ name="extrakerns",
+ description="additional intercharacter kerning",
+ initializers={
+ base=extrakerns,
+ node=extrakerns,
+ }
+}
+registerafmfeature {
+ name='tlig',
+ description='tex ligatures',
+ initializers={
+ base=texligatures,
+ node=texligatures,
+ }
+}
+registerafmfeature {
+ name='trep',
+ description='tex replacements',
+ initializers={
+ base=texreplacements,
+ node=texreplacements,
+ }
+}
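+-- [editorial note] Besides "mode" the afm handler thus registers five features:
+-- liga (afm ligatures), tlig (tex ligatures), trep (quote replacement), kern
+-- and extrakerns; each is hooked into both base and node mode and simply
+-- copies data prepared by afm.load into the characters table when enabled.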
+local check_tfm=readers.check_tfm
+fonts.formats.afm="type1"
+fonts.formats.pfb="type1"
+local function check_afm(specification,fullname)
+ local foundname=findbinfile(fullname,'afm') or ""
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"afm") or ""
+ end
+ if foundname=="" and afm.autoprefixed then
+ local encoding,shortname=match(fullname,"^(.-)%-(.*)$")
+ if encoding and shortname and fonts.encodings.known[encoding] then
+ shortname=findbinfile(shortname,'afm') or ""
+ if shortname~="" then
+ foundname=shortname
+ if trace_defining then
+ report_afm("stripping encoding prefix from filename %a",afmname)
+ end
+ end
+ end
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="afm"
+ return read_from_afm(specification)
+ end
+end
+function readers.afm(specification,method)
+ local fullname,tfmdata=specification.filename or "",nil
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ tfmdata=check_afm(specification,specification.name.."."..forced)
+ end
+ if not tfmdata then
+ method=method or definers.method or "afm or tfm"
+ if method=="tfm" then
+ tfmdata=check_tfm(specification,specification.name)
+ elseif method=="afm" then
+ tfmdata=check_afm(specification,specification.name)
+ elseif method=="tfm or afm" then
+ tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name)
+ else
+ tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name)
+ end
+ end
+ else
+ tfmdata=check_afm(specification,fullname)
+ end
+ return tfmdata
+end
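+-- A pfb request is rerouted to the afm reader by rewriting the specification suffix.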
+function readers.pfb(specification,method)
+ local original=specification.specification
+ if trace_defining then
+ report_afm("using afm reader for %a",original)
+ end
+ specification.specification=gsub(original,"%.pfb",".afm")
+ specification.forced="afm"
+ return readers.afm(specification,method)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afk']={
+ version=1.001,
+ comment="companion to font-afm.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+ dataonly=true,
+}
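+-- Data-only module for the afm handler: default ligature building blocks, TeX ligatures
+-- (double quotes and dashes) and the kerning equivalence classes keyed by glyph name.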
+local allocate=utilities.storage.allocate
+fonts.handlers.afm.helpdata={
+ ligatures=allocate {
+ ['f']={
+ { 'f','ff' },
+ { 'i','fi' },
+ { 'l','fl' },
+ },
+ ['ff']={
+ { 'i','ffi' }
+ },
+ ['fi']={
+ { 'i','fii' }
+ },
+ ['fl']={
+ { 'i','fli' }
+ },
+ ['s']={
+ { 't','st' }
+ },
+ ['i']={
+ { 'j','ij' }
+ },
+ },
+ texligatures=allocate {
+ ['quoteleft']={
+ { 'quoteleft','quotedblleft' }
+ },
+ ['quoteright']={
+ { 'quoteright','quotedblright' }
+ },
+ ['hyphen']={
+ { 'hyphen','endash' }
+ },
+ ['endash']={
+ { 'hyphen','emdash' }
+ }
+ },
+ leftkerned=allocate {
+ AEligature="A",aeligature="a",
+ OEligature="O",oeligature="o",
+ IJligature="I",ijligature="i",
+ AE="A",ae="a",
+ OE="O",oe="o",
+ IJ="I",ij="i",
+ Ssharp="S",ssharp="s",
+ },
+ rightkerned=allocate {
+ AEligature="E",aeligature="e",
+ OEligature="E",oeligature="e",
+ IJligature="J",ijligature="j",
+ AE="E",ae="e",
+ OE="E",oe="e",
+ IJ="J",ij="j",
+ Ssharp="S",ssharp="s",
+ },
+ bothkerned=allocate {
+ Acircumflex="A",acircumflex="a",
+ Ccircumflex="C",ccircumflex="c",
+ Ecircumflex="E",ecircumflex="e",
+ Gcircumflex="G",gcircumflex="g",
+ Hcircumflex="H",hcircumflex="h",
+ Icircumflex="I",icircumflex="i",
+ Jcircumflex="J",jcircumflex="j",
+ Ocircumflex="O",ocircumflex="o",
+ Scircumflex="S",scircumflex="s",
+ Ucircumflex="U",ucircumflex="u",
+ Wcircumflex="W",wcircumflex="w",
+ Ycircumflex="Y",ycircumflex="y",
+ Agrave="A",agrave="a",
+ Egrave="E",egrave="e",
+ Igrave="I",igrave="i",
+ Ograve="O",ograve="o",
+ Ugrave="U",ugrave="u",
+ Ygrave="Y",ygrave="y",
+ Atilde="A",atilde="a",
+ Itilde="I",itilde="i",
+ Otilde="O",otilde="o",
+ Utilde="U",utilde="u",
+ Ntilde="N",ntilde="n",
+ Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a",
+ Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e",
+ Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i",
+ Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o",
+ Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u",
+ Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y",
+ Aacute="A",aacute="a",
+ Cacute="C",cacute="c",
+ Eacute="E",eacute="e",
+ Iacute="I",iacute="i",
+ Lacute="L",lacute="l",
+ Nacute="N",nacute="n",
+ Oacute="O",oacute="o",
+ Racute="R",racute="r",
+ Sacute="S",sacute="s",
+ Uacute="U",uacute="u",
+ Yacute="Y",yacute="y",
+ Zacute="Z",zacute="z",
+ Dstroke="D",dstroke="d",
+ Hstroke="H",hstroke="h",
+ Tstroke="T",tstroke="t",
+ Cdotaccent="C",cdotaccent="c",
+ Edotaccent="E",edotaccent="e",
+ Gdotaccent="G",gdotaccent="g",
+ Idotaccent="I",idotaccent="i",
+ Zdotaccent="Z",zdotaccent="z",
+ Amacron="A",amacron="a",
+ Emacron="E",emacron="e",
+ Imacron="I",imacron="i",
+ Omacron="O",omacron="o",
+ Umacron="U",umacron="u",
+ Ccedilla="C",ccedilla="c",
+ Kcedilla="K",kcedilla="k",
+ Lcedilla="L",lcedilla="l",
+ Ncedilla="N",ncedilla="n",
+ Rcedilla="R",rcedilla="r",
+ Scedilla="S",scedilla="s",
+ Tcedilla="T",tcedilla="t",
+ Ohungarumlaut="O",ohungarumlaut="o",
+ Uhungarumlaut="U",uhungarumlaut="u",
+ Aogonek="A",aogonek="a",
+ Eogonek="E",eogonek="e",
+ Iogonek="I",iogonek="i",
+ Uogonek="U",uogonek="u",
+ Aring="A",aring="a",
+ Uring="U",uring="u",
+ Abreve="A",abreve="a",
+ Ebreve="E",ebreve="e",
+ Gbreve="G",gbreve="g",
+ Ibreve="I",ibreve="i",
+ Obreve="O",obreve="o",
+ Ubreve="U",ubreve="u",
+ Ccaron="C",ccaron="c",
+ Dcaron="D",dcaron="d",
+ Ecaron="E",ecaron="e",
+ Lcaron="L",lcaron="l",
+ Ncaron="N",ncaron="n",
+ Rcaron="R",rcaron="r",
+ Scaron="S",scaron="s",
+ Tcaron="T",tcaron="t",
+ Zcaron="Z",zcaron="z",
+ dotlessI="I",dotlessi="i",
+ dotlessJ="J",dotlessj="j",
+ AEligature="AE",aeligature="ae",AE="AE",ae="ae",
+ OEligature="OE",oeligature="oe",OE="OE",oe="oe",
+ IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij",
+ Lstroke="L",lstroke="l",Lslash="L",lslash="l",
+ Ostroke="O",ostroke="o",Oslash="O",oslash="o",
+ Ssharp="SS",ssharp="ss",
+ Aumlaut="A",aumlaut="a",
+ Eumlaut="E",eumlaut="e",
+ Iumlaut="I",iumlaut="i",
+ Oumlaut="O",oumlaut="o",
+ Uumlaut="U",uumlaut="u",
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
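+-- The plain tfm reader resolves the tfm (or ofm) file and lets LuaTeX's built-in
+-- font.read_tfm do the actual work.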
+function fonts.readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
+registerotffeature {
+ name="features",
+ description="initialization of feature handler",
+ default=true,
+}
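+-- The mode, language and script features only record sanitized values in tfmdata.properties;
+-- unknown languages and scripts fall back to "dflt".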
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
+ end
+ end
+end
+local function setscript(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local scripts=otftables and otftables.scripts
+ local properties=tfmdata.properties
+ if not scripts then
+ properties.script=cleanvalue
+ elseif scripts[value] then
+ properties.script=cleanvalue
+ else
+ properties.script="dflt"
+ end
+ end
+end
+registerotffeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+registerotffeature {
+ name="language",
+ description="language",
+ initializers={
+ base=setlanguage,
+ node=setlanguage,
+ }
+}
+registerotffeature {
+ name="script",
+ description="script",
+ initializers={
+ base=setscript,
+ node=setscript,
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local utfbyte=utf.byte
+local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local abs=math.abs
+local reversed,concat,insert,remove,sortedkeys=table.reversed,table.concat,table.insert,table.remove,table.sortedkeys
+local ioflush=io.flush
+local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
+local formatters=string.formatters
+local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match
+local setmetatableindex=table.setmetatableindex
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
+local trace_subfonts=false registertracker("otf.subfonts",function(v) trace_subfonts=v end)
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
+local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
+local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end)
+local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.glists={ "gsub","gpos" }
+otf.version=2.819
+otf.cache=containers.define("fonts","otf",otf.version,true)
+local hashes=fonts.hashes
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local fontdata=hashes and hashes.identifiers
+local chardata=characters and characters.data
+local otffeatures=constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local forceload=false
+local cleanup=0
+local packdata=true
+local syncspace=true
+local forcenotdef=false
+local includesubfonts=false
+local overloadkerns=false
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local wildcard="*"
+local default="dflt"
+local fontloader=fontloader
+local open_font=fontloader.open
+local close_font=fontloader.close
+local font_fields=fontloader.fields
+local apply_featurefile=fontloader.apply_featurefile
+local mainfields=nil
+local glyphfields=nil
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end)
+function otf.fileformat(filename)
+ local leader=lower(io.loadchunk(filename,4))
+ local suffix=lower(file.suffix(filename))
+ if leader=="otto" then
+ return formats.otf,suffix=="otf"
+ elseif leader=="ttcf" then
+ return formats.ttc,suffix=="ttc"
+ elseif suffix=="ttc" then
+ return formats.ttc,true
+ elseif suffix=="dfont" then
+ return formats.dfont,true
+ else
+ return formats.ttf,suffix=="ttf"
+ end
+end
+local function otf_format(filename)
+ local format,okay=otf.fileformat(filename)
+ if not okay then
+ report_otf("font %a is actually an %a file",filename,format)
+ end
+ return format
+end
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile~="" then
+ if trace_loading then
+ report_otf("using featurefile %a",featurefile)
+ end
+ apply_featurefile(raw,featurefile)
+ end
+end
+local function showfeatureorder(rawdata,filename)
+ local sequences=rawdata.resources.sequences
+ if sequences and #sequences>0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence=sequences[nos]
+ local typ=sequence.type or "no-type"
+ local name=sequence.name or "no-name"
+ local subtables=sequence.subtables or { "no-subtables" }
+ local features=sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature,scripts in next,features do
+ local tt={}
+ if type(scripts)=="table" then
+ for script,languages in next,scripts do
+ local ttt={}
+ for language,_ in next,languages do
+ ttt[#ttt+1]=language
+ end
+ tt[#tt+1]=formatters["[%s: % t]"](script,ttt)
+ end
+ if trace_loading then
+ report_otf(" %s: % t",feature,tt)
+ end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+local valid_fields=table.tohash {
+ "ascent",
+ "cidinfo",
+ "copyright",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ "hasvmetrics",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ "macstyle",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ "serifcheck",
+ "sfd_version",
+ "strokedfont",
+ "strokewidth",
+ "table_version",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "validation_state",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
+}
+local ordered_enhancers={
+ "prepare tables",
+ "prepare glyphs",
+ "prepare lookups",
+ "analyze glyphs",
+ "analyze math",
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+ "merge kern classes",
+ "reorganize features",
+ "reorganize subtables",
+ "check glyphs",
+ "check metadata",
+ "prepare tounicode",
+ "check encoding",
+ "add duplicates",
+ "expand lookups",
+ "cleanup tables",
+ "compact lookups",
+ "purge names",
+}
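+-- The enhancers above run in this fixed order; patches.before and patches.after let callers
+-- hook extra actions around a specific enhancer, keyed by a filename pattern.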
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
+local function enhance(name,data,filename,raw)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ end
+end
+function enhancers.apply(data,filename,raw)
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush()
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush()
+end
+function patches.register(what,where,pattern,action)
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
+ end
+ end
+end
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+function enhancers.register(what,action)
+ actions[what]=action
+end
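+-- otf.load returns the cleaned up font table, preferably from the cache; a reload is forced
+-- when the file size or modification time changed, when the set of feature files changed, or
+-- when the fonts.otf.loader.force directive is set.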
+function otf.load(filename,sub,featurefile)
+ local base=file.basename(file.removesuffix(filename))
+ local name=file.removesuffix(base)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
+ end
+ end
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload=true
+ end
+ if not reload then
+ local featuredata=data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata~=#featurefiles then
+ reload=true
+ else
+ for i=1,#featurefiles do
+ local fi,fd=featurefiles[i],featuredata[i]
+ if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
+ reload=true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload=true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ starttiming("fontloader")
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata,messages
+ if sub then
+ fontdata,messages=open_font(filename,sub)
+ else
+ fontdata,messages=open_font(filename)
+ end
+ if fontdata then
+ mainfields=mainfields or (font_fields and font_fields(fontdata))
+ end
+ if trace_loading and messages and #messages>0 then
+ if type(messages)=="string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
+ end
+ end
+ else
+ report_otf("loading done")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes={
+ }
+ local splitter=lpeg.splitter(" ",unicodes)
+ data={
+ size=size,
+ time=time,
+ subfont=sub,
+ format=otf_format(filename),
+ featuredata=featurefiles,
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=otf.version,
+ creator="context mkiv",
+ unicodes=unicodes,
+ indices={
+ },
+ duplicates={
+ },
+ variants={
+ },
+ lookuptypes={},
+ },
+ warnings={},
+ metadata={
+ },
+ properties={
+ },
+ descriptions={},
+ goodies={},
+ helpers={
+ tounicodelist=splitter,
+ tounicodetable=Ct(splitter),
+ },
+ }
+ report_otf("file size: %s",size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime={}
+ if packdata then
+ if cleanup>0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
+ end
+ stoptiming("fontloader")
+ if elapsedtime then
+ report_otf("loading, optimizing, packing and caching time %s, pack time %s",
+ elapsedtime("fontloader"),packdata and elapsedtime(packtime) or 0)
+ end
+ close_font(fontdata)
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ stoptiming("fontloader")
+ data=nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ else
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
+ enhance("add dimensions",data,filename,nil,false)
+enhance("check extra features",data,filename)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+actions["prepare tables"]=function(data,filename,raw)
+ data.properties.hasitalics=false
+end
+actions["add dimensions"]=function(data,filename)
+ if data then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local basename=trace_markwidth and file.basename(filename)
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ if bb then
+ local ht=bb[4]
+ local dp=-bb[2]
+ if ht==0 or ht<0 then
+ else
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
+ end
+ end
+ end
+ end
+end
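+-- somecopy duplicates a raw (sub)font table recursively but skips the large glyphs array.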
+local function somecopy(old)
+ if old then
+ local new={}
+ if type(old)=="table" then
+ for k,v in next,old do
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ else
+ for i=1,#mainfields do
+ local k=mainfields[i]
+ local v=old[k]
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+actions["prepare glyphs"]=function(data,filename,raw)
+ local tableversion=tonumber(raw.table_version) or 0
+ local rawglyphs=raw.glyphs
+ local rawsubfonts=raw.subfonts
+ local rawcidinfo=raw.cidinfo
+ local criterium=constructors.privateoffset
+ local private=criterium
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local variants=resources.variants
+ if rawsubfonts then
+ metadata.subfonts=includesubfonts and {}
+ properties.cidinfo=rawcidinfo
+ if rawcidinfo.registry then
+ local cidmap=fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname=cidmap.usedname
+ local nofnames=0
+ local nofunicodes=0
+ local cidunicodes=cidmap.unicodes
+ local cidnames=cidmap.names
+ local cidtotal=0
+ local unique=trace_subfonts and {}
+ for cidindex=1,#rawsubfonts do
+ local subfont=rawsubfonts[cidindex]
+ local cidglyphs=subfont.glyphs
+ if includesubfonts then
+ metadata.subfonts[cidindex]=somecopy(subfont)
+ end
+ local cidcnt,cidmin,cidmax
+ if tableversion>0.3 then
+ cidcnt=subfont.glyphcnt
+ cidmin=subfont.glyphmin
+ cidmax=subfont.glyphmax
+ else
+ cidcnt=subfont.glyphcnt
+ cidmin=0
+ cidmax=cidcnt-1
+ end
+ if trace_subfonts then
+ local cidtot=cidmax-cidmin+1
+ cidtotal=cidtotal+cidtot
+ report_otf("subfont: %i, min: %i, max: %i, cnt: %i, n: %i",cidindex,cidmin,cidmax,cidtot,cidcnt)
+ end
+ if cidcnt>0 then
+ for cidslot=cidmin,cidmax do
+ local glyph=cidglyphs[cidslot]
+ if glyph then
+ local index=tableversion>0.3 and glyph.orig_pos or cidslot
+ if trace_subfonts then
+ unique[index]=true
+ end
+ local unicode=glyph.unicode
+ if unicode>=0x00E000 and unicode<=0x00F8FF then
+ unicode=-1
+ elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
+ unicode=-1
+ elseif unicode>=0x100000 and unicode<=0x10FFFD then
+ unicode=-1
+ end
+ local name=glyph.name or cidnames[index]
+ if not unicode or unicode==-1 then
+ unicode=cidunicodes[index]
+ end
+ if unicode and descriptions[unicode] then
+ if trace_private then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ end
+ unicode=-1
+ end
+ if not unicode or unicode==-1 then
+ if not name then
+ name=formatters["u%06X.ctx"](private)
+ end
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ nofnames=nofnames+1
+ else
+ if not name then
+ name=formatters["u%06X.ctx"](unicode)
+ end
+ unicodes[name]=unicode
+ nofunicodes=nofunicodes+1
+ end
+ indices[index]=unicode
+ local description={
+ boundingbox=glyph.boundingbox,
+ name=name or "unknown",
+ cidindex=cidindex,
+ index=cidslot,
+ glyph=glyph,
+ }
+ descriptions[unicode]=description
+ local altuni=glyph.altuni
+ if altuni then
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ if u~=unicode then
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ else
+ report_otf("potential problem: no glyphs found in subfont %i",cidindex)
+ end
+ end
+ if trace_subfonts then
+ report_otf("nofglyphs: %i, unique: %i",cidtotal,table.count(unique))
+ end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
+ end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+ else
+ local cnt=raw.glyphcnt or 0
+ local min=tableversion>0.3 and raw.glyphmin or 0
+ local max=tableversion>0.3 and raw.glyphmax or (raw.glyphcnt-1)
+ if cnt>0 then
+ for index=min,max do
+ local glyph=rawglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name
+ if not unicode or unicode==-1 then
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ else
+ if unicode>criterium then
+ local taken=descriptions[unicode]
+ if taken then
+ if unicode>=private then
+ private=unicode+1
+ else
+ private=private+1
+ end
+ descriptions[private]=taken
+ unicodes[taken.name]=private
+ indices[taken.index]=private
+ if trace_private then
+ report_otf("slot %U is moved to %U due to private in font",unicode)
+ end
+ else
+ if unicode>=private then
+ private=unicode+1
+ end
+ end
+ end
+ unicodes[name]=unicode
+ end
+ indices[index]=unicode
+ descriptions[unicode]={
+ boundingbox=glyph.boundingbox,
+ name=name,
+ index=index,
+ glyph=glyph,
+ }
+ local altuni=glyph.altuni
+ if altuni then
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ if u~=unicode then
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ end
+ end
+ end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ else
+ report_otf("potential problem: no glyphs found")
+ end
+ end
+ resources.private=private
+end
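+-- "check encoding": when the font carries a unicode map it is checked against the collected
+-- indices; code points that map onto an already known glyph are recorded as copies and
+-- materialized later by the "add duplicates" enhancer.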
+actions["check encoding"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local mapdata=raw.map or {}
+ local unicodetoindex=mapdata and mapdata.map or {}
+ local indextounicode=mapdata and mapdata.backmap or {}
+ local encname=lower(data.enc_name or mapdata.enc_name or "")
+ local criterium=0xFFFF
+ local privateoffset=constructors.privateoffset
+ if find(encname,"unicode") then
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
+ end
+ local reported={}
+ for maybeunicode,index in next,unicodetoindex do
+ if descriptions[maybeunicode] then
+ else
+ local unicode=indices[index]
+ if not unicode then
+ elseif maybeunicode==unicode then
+ elseif unicode>privateoffset then
+ else
+ local d=descriptions[unicode]
+ if d then
+ local c=d.copies
+ if c then
+ c[maybeunicode]=true
+ else
+ d.copies={ [maybeunicode]=true }
+ end
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index]=true
+ end
+ end
+ end
+ end
+ for unicode,data in next,descriptions do
+ local d=data.copies
+ if d then
+ duplicates[unicode]=sortedkeys(d)
+ data.copies=nil
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+ if mapdata then
+ mapdata.map={}
+ mapdata.backmap={}
+ end
+end
+actions["add duplicates"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ for unicode,d in next,duplicates do
+ local nofduplicates=#d
+ if nofduplicates>4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ for i=1,nofduplicates do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local n=0
+ for _,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
+ end
+ end
+ end
+ if u>0 then
+ local duplicate=table.copy(description)
+ duplicate.comment=formatters["copy of %U"](unicode)
+ descriptions[u]=duplicate
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["analyze glyphs"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local hasitalics=false
+ local widths={}
+ local marks={}
+ for unicode,description in next,descriptions do
+ local glyph=description.glyph
+ local italic=glyph.italic_correction
+ if not italic then
+ elseif italic==0 then
+ else
+ description.italic=italic
+ hasitalics=true
+ end
+ local width=glyph.width
+ widths[width]=(widths[width] or 0)+1
+ local class=glyph.class
+ if class then
+ if class=="mark" then
+ marks[unicode]=true
+ end
+ description.class=class
+ end
+ end
+ properties.hasitalics=hasitalics
+ resources.marks=marks
+ local wd,most=0,1
+ for k,v in next,widths do
+ if v>most then
+ wd,most=k,v
+ end
+ end
+ if most>1000 then
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode,description in next,descriptions do
+ if description.width==wd then
+ else
+ description.width=description.glyph.width
+ end
+ end
+ resources.defaultwidth=wd
+ else
+ for unicode,description in next,descriptions do
+ description.width=description.glyph.width
+ end
+ end
+end
+actions["reorganize mark classes"]=function(data,filename,raw)
+ local mark_classes=raw.mark_classes
+ if mark_classes then
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local markclasses={}
+ resources.markclasses=markclasses
+ for name,class in next,mark_classes do
+ local t={}
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]]=true
+ end
+ markclasses[name]=t
+ end
+ end
+end
+actions["reorganize features"]=function(data,filename,raw)
+ local features={}
+ data.resources.features=features
+ for k=1,#otf.glists do
+ local what=otf.glists[k]
+ local dw=raw[what]
+ if dw then
+ local f={}
+ features[what]=f
+ for i=1,#dw do
+ local d=dw[i]
+ local dfeatures=d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df=dfeatures[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["reorganize anchor classes"]=function(data,filename,raw)
+ local resources=data.resources
+ local anchor_to_lookup={}
+ local lookup_to_anchor={}
+ resources.anchor_to_lookup=anchor_to_lookup
+ resources.lookup_to_anchor=lookup_to_anchor
+ local classes=raw.anchor_classes
+ if classes then
+ for c=1,#classes do
+ local class=classes[c]
+ local anchor=class.name
+ local lookups=class.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ local a=anchor_to_lookup[anchor]
+ if not a then
+ a={}
+ anchor_to_lookup[anchor]=a
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ local l=lookup_to_anchor[lookup]
+ if l then
+ l[anchor]=true
+ else
+ l={ [anchor]=true }
+ lookup_to_anchor[lookup]=l
+ end
+ a[lookup]=true
+ end
+ end
+ end
+end
+actions["prepare tounicode"]=function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
+end
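+-- Chained context lookups get a direction flag: 1 for the forward chains, -1 for the
+-- reverse variants.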
+local g_directions={
+ gsub_contextchain=1,
+ gpos_contextchain=1,
+ gsub_reversecontextchain=-1,
+ gpos_reversecontextchain=-1,
+}
+actions["reorganize subtables"]=function(data,filename,raw)
+ local resources=data.resources
+ local sequences={}
+ local lookups={}
+ local chainedfeatures={}
+ resources.sequences=sequences
+ resources.lookups=lookups
+ for k=1,#otf.glists do
+ local what=otf.glists[k]
+ local dw=raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk=dw[k]
+ local features=gk.features
+ local typ=gk.type
+ local chain=g_directions[typ] or 0
+ local subtables=gk.subtables
+ if subtables then
+ local t={}
+ for s=1,#subtables do
+ t[s]=subtables[s].name
+ end
+ subtables=t
+ end
+ local flags,markclass=gk.flags,nil
+ if flags then
+ local t={
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass=flags.mark_class
+ if markclass then
+ markclass=resources.markclasses[markclass]
+ end
+ flags=t
+ end
+ local name=gk.name
+ if not name then
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ local f={}
+ local o={}
+ for i=1,#features do
+ local df=features[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ o[#o+1]=tag
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ sequences[#sequences+1]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ name=name,
+ subtables=subtables,
+ markclass=markclass,
+ features=f,
+ order=o,
+ }
+ else
+ lookups[name]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ subtables=subtables,
+ markclass=markclass,
+ }
+ end
+ end
+ end
+ end
+end
+actions["prepare lookups"]=function(data,filename,raw)
+ local lookups=raw.lookups
+ if lookups then
+ data.lookups=lookups
+ end
+end
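+-- Helpers for "reorganize lookups": the uncover functions turn space separated glyph name
+-- strings into arrays of unicodes, the hashed functions turn those into match sets; results
+-- are cached so the tables stay shared.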
+local function t_uncover(splitter,cache,covers)
+ local result={}
+ for n=1,#covers do
+ local cover=covers[n]
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ result[n]=uncovered
+ end
+ return result
+end
+local function s_uncover(splitter,cache,cover)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ return { uncovered }
+ end
+end
+local function t_hashed(t,cache)
+ if t then
+ local ht={}
+ for i=1,#t do
+ local ti=t[i]
+ local tih=cache[ti]
+ if not tih then
+ local tn=#ti
+ if tn==1 then
+ tih={ [ti[1]]=true }
+ else
+ tih={}
+ for i=1,tn do
+ tih[ti[i]]=true
+ end
+ end
+ cache[ti]=tih
+ end
+ ht[i]=tih
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function s_hashed(t,cache)
+ if t then
+ local tf=t[1]
+ local nf=#tf
+ if nf==1 then
+ return { [tf[1]]=true }
+ else
+ local ht={}
+ for i=1,nf do
+ ht[i]={ [tf[i]]=true }
+ end
+ return ht
+ end
+ else
+ return nil
+ end
+end
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cover[1]
+ local replaced=cache[replacements]
+ if not replaced then
+ replaced=lpegmatch(splitter,replacements)
+ cache[replacements]=replaced
+ end
+ local nu,nr=#uncovered,#replaced
+ local r={}
+ if nu==nr then
+ for i=1,nu do
+ r[uncovered[i]]=replaced[i]
+ end
+ end
+ return r
+ end
+end
+actions["reorganize lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local helpers=data.helpers
+ local duplicates=data.resources.duplicates
+ local splitter=helpers.tounicodetable
+ local t_u_cache={}
+ local s_u_cache=t_u_cache
+ local t_h_cache={}
+ local s_h_cache=t_h_cache
+ local r_u_cache={}
+ helpers.matchcache=t_h_cache
+ for _,lookup in next,data.lookups do
+ local rules=lookup.rules
+ if rules then
+ local format=lookup.format
+ if format=="class" then
+ local before_class=lookup.before_class
+ if before_class then
+ before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class=lookup.current_class
+ if current_class then
+ current_class=t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class=lookup.after_class
+ if after_class then
+ after_class=t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule=rules[i]
+ local class=rule.class
+ local before=class.before
+ if before then
+ for i=1,#before do
+ before[i]=before_class[before[i]] or {}
+ end
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=class.current
+ local lookups=rule.lookups
+ if current then
+ for i=1,#current do
+ current[i]=current_class[current[i]] or {}
+ if lookups and not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=class.after
+ if after then
+ for i=1,#after do
+ after[i]=after_class[after[i]] or {}
+ end
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.class=nil
+ end
+ lookup.before_class=nil
+ lookup.current_class=nil
+ lookup.after_class=nil
+ lookup.format="coverage"
+ elseif format=="coverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local coverage=rule.coverage
+ if coverage then
+ local before=coverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=coverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#current do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=coverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.coverage=nil
+ end
+ end
+ elseif format=="reversecoverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local reversecoverage=rule.reversecoverage
+ if reversecoverage then
+ local before=reversecoverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=reversecoverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=reversecoverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ local replacements=reversecoverage.replacements
+ if replacements then
+ rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage=nil
+ end
+ end
+ elseif format=="glyphs" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local glyphs=rule.glyphs
+ if glyphs then
+ local fore=glyphs.fore
+ if fore and fore~="" then
+ fore=s_uncover(splitter,s_u_cache,fore)
+ rule.after=s_hashed(fore,s_h_cache)
+ end
+ local back=glyphs.back
+ if back then
+ back=s_uncover(splitter,s_u_cache,back)
+ rule.before=s_hashed(back,s_h_cache)
+ end
+ local names=glyphs.names
+ if names then
+ names=s_uncover(splitter,s_u_cache,names)
+ rule.current=s_hashed(names,s_h_cache)
+ end
+ rule.glyphs=nil
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#names do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
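+-- After reorganizing, the cached match sets are extended with duplicate code points so that
+-- contextual rules also match duplicated glyphs.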
+actions["expand lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local cache=data.helpers.matchcache
+ if cache then
+ local duplicates=data.resources.duplicates
+ for key,hash in next,cache do
+ local done=nil
+ for key in next,hash do
+ local unicode=duplicates[key]
+ if not unicode then
+ elseif type(unicode)=="table" then
+ for i=1,#unicode do
+ local u=unicode[i]
+ if hash[u] then
+ elseif done then
+ done[u]=key
+ else
+ done={ [u]=key }
+ end
+ end
+ else
+ if hash[unicode] then
+ elseif done then
+ done[unicode]=key
+ else
+ done={ [unicode]=key }
+ end
+ end
+ end
+ if done then
+ for u in next,done do
+ hash[u]=true
+ end
+ end
+ end
+ end
+ end
+end
+local function check_variants(unicode,the_variants,splitter,unicodes)
+ local variants=the_variants.variants
+ if variants then
+ local glyphs=lpegmatch(splitter,variants)
+ local done={ [unicode]=true }
+ local n=0
+ for i=1,#glyphs do
+ local g=glyphs[i]
+ if done[g] then
+ if i>1 then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ end
+ else
+ if n==0 then
+ n=1
+ variants={ g }
+ else
+ n=n+1
+ variants[n]=g
+ end
+ done[g]=true
+ end
+ end
+ if n==0 then
+ variants=nil
+ end
+ end
+ local parts=the_variants.parts
+ if parts then
+ local p=#parts
+ if p>0 then
+ for i=1,p do
+ local pi=parts[i]
+ pi.glyph=unicodes[pi.component] or 0
+ pi.component=nil
+ end
+ else
+ parts=nil
+ end
+ end
+ local italic=the_variants.italic
+ if italic and italic==0 then
+ italic=nil
+ end
+ return variants,parts,italic
+end
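+-- Math glyph data (kerns, horizontal and vertical variants and parts, italic correction and
+-- top accent) is moved from the raw glyph into description.math.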
+actions["analyze math"]=function(data,filename,raw)
+ if raw.math then
+ data.metadata.math=raw.math
+ local unicodes=data.resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for unicode,description in next,data.descriptions do
+ local glyph=description.glyph
+ local mathkerns=glyph.mathkern
+ local hvariants=glyph.horiz_variants
+ local vvariants=glyph.vert_variants
+ local accent=glyph.top_accent
+ local italic=glyph.italic_correction
+ if mathkerns or hvariants or vvariants or accent or italic then
+ local math={}
+ if accent then
+ math.accent=accent
+ end
+ if mathkerns then
+ for k,v in next,mathkerns do
+ if not next(v) then
+ mathkerns[k]=nil
+ else
+ for k,v in next,v do
+ if v==0 then
+ k[v]=nil
+ end
+ end
+ end
+ end
+ math.kerns=mathkerns
+ end
+ if hvariants then
+ math.hvariants,math.hparts,math.hitalic=check_variants(unicode,hvariants,splitter,unicodes)
+ end
+ if vvariants then
+ math.vvariants,math.vparts,math.vitalic=check_variants(unicode,vvariants,splitter,unicodes)
+ end
+ if italic and italic~=0 then
+ math.italic=italic
+ end
+ description.math=math
+ end
+ end
+ end
+end
+actions["reorganize glyph kerns"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ for unicode,description in next,descriptions do
+ local kerns=description.glyph.kerns
+ if kerns then
+ local newkerns={}
+ for k,kern in next,kerns do
+ local name=kern.char
+ local offset=kern.off
+ local lookup=kern.lookup
+ if name and offset and lookup then
+ local unicode=unicodes[name]
+ if unicode then
+ if type(lookup)=="table" then
+ for l=1,#lookup do
+ local lookup=lookup[l]
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ else
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns=newkerns
+ end
+ end
+end
+actions["merge kern classes"]=function(data,filename,raw)
+ local gposlist=raw.gpos
+ if gposlist then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ local ignored=0
+ local blocked=0
+ for gp=1,#gposlist do
+ local gpos=gposlist[gp]
+ local subtables=gpos.subtables
+ if subtables then
+ local first_done={}
+ local split={}
+ for s=1,#subtables do
+ local subtable=subtables[s]
+ local kernclass=subtable.kernclass
+ local lookup=subtable.lookup or subtable.name
+ if kernclass then
+ if #kernclass>0 then
+ kernclass=kernclass[1]
+ lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts=kernclass.firsts
+ local seconds=kernclass.seconds
+ local offsets=kernclass.offsets
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ if sv then
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
+ end
+ end
+ end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked=blocked+1
+ else
+ first_done[first_unicode]=true
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ if overloadkerns then
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
+ end
+ else
+ for second_unicode,kern in next,extrakerns do
+ local k=lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode]=kern
+ elseif k~=kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored=ignored+1
+ end
+ end
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass={}
+ end
+ end
+ end
+ end
+ if ignored>0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked>0 then
+ report_otf("%s successive kerns blocked",blocked)
+ end
+ end
+end
+actions["check glyphs"]=function(data,filename,raw)
+ for unicode,description in next,data.descriptions do
+ description.glyph=nil
+ end
+end
+local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1)
+local function valid_ps_name(str)
+ return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false
+end
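+-- "check metadata" copies the whitelisted top level fields, drops embedded ttf table blobs
+-- and replaces invalid (non ascii) PostScript font names, recording a warning when it does.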
+actions["check metadata"]=function(data,filename,raw)
+ local metadata=data.metadata
+ for _,k in next,mainfields do
+ if valid_fields[k] then
+ local v=raw[k]
+ if not metadata[k] then
+ metadata[k]=v
+ end
+ end
+ end
+ local ttftables=metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data="deleted"
+ end
+ end
+ local names=raw.names
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ local function valid(what)
+ if names then
+ for i=1,#names do
+ local list=names[i]
+ local names=list.names
+ if names then
+ local name=names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname=metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname=valid(what)
+ if not newname then
+ newname=formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1]=warning
+ report_otf(warning)
+ metadata[what]=newname
+ end
+ end
+ check("fontname")
+ check("fullname")
+ end
+ if names then
+ local psname=metadata.psname
+ if not psname or psname=="" then
+ for i=1,#names do
+ local name=names[i]
+ if lower(name.lang)=="english (us)" then
+ local specification=name.names
+ if specification then
+ local postscriptname=specification.postscriptname
+ if postscriptname then
+ psname=postscriptname
+ end
+ end
+ end
+ break
+ end
+ end
+ if psname~=metadata.fontname then
+ report_otf("fontname %a, fullname %a, psname %a",metadata.fontname,metadata.fullname,psname)
+ end
+ metadata.psname=psname
+ end
+end
+actions["cleanup tables"]=function(data,filename,raw)
+ local duplicates=data.resources.duplicates
+ if duplicates then
+ for k,v in next,duplicates do
+ if #v==1 then
+ duplicates[k]=v[1]
+ end
+ end
+ end
+ data.resources.indices=nil
+ data.resources.unicodes=nil
+ data.helpers=nil
+end
+actions["reorganize glyph lookups"]=function(data,filename,raw)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local descriptions=data.descriptions
+ local splitter=data.helpers.tounicodelist
+ local lookuptypes=resources.lookuptypes
+ for unicode,description in next,descriptions do
+ local lookups=description.glyph.lookups
+ if lookups then
+ for tag,lookuplist in next,lookups do
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local specification=lookup.specification
+ local lookuptype=lookup.type
+ local lt=lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag]=lookuptype
+ elseif lt~=lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype=="ligature" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="alternate" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="substitution" then
+ lookuplist[l]=unicodes[specification.variant]
+ elseif lookuptype=="multiple" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="position" then
+ lookuplist[l]={
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype=="pair" then
+ local one=specification.offsets[1]
+ local two=specification.offsets[2]
+ local paired=unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
+ else
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
+ else
+ lookuplist[l]={ paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups,mlookups
+ for tag,lookuplist in next,lookups do
+ if #lookuplist==1 then
+ if slookups then
+ slookups[tag]=lookuplist[1]
+ else
+ slookups={ [tag]=lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag]=lookuplist
+ else
+ mlookups={ [tag]=lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups=slookups
+ end
+ if mlookups then
+ description.mlookups=mlookups
+ end
+ end
+ end
+end
+local zero={ 0,0 }
+actions["reorganize glyph anchors"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ for unicode,description in next,descriptions do
+ local anchors=description.glyph.anchors
+ if anchors then
+ for class,data in next,anchors do
+ if class=="baselig" then
+ for tag,specification in next,data do
+ local n=0
+ for k,v in next,specification do
+ if k>n then
+ n=k
+ end
+ local x,y=v.x,v.y
+ if x or y then
+ specification[k]={ x or 0,y or 0 }
+ else
+ specification[k]=zero
+ end
+ end
+ local t={}
+ for i=1,n do
+ t[i]=specification[i] or zero
+ end
+ data[tag]=t
+ end
+ else
+ for tag,specification in next,data do
+ local x,y=specification.x,specification.y
+ if x or y then
+ data[tag]={ x or 0,y or 0 }
+ else
+ data[tag]=zero
+ end
+ end
+ end
+ end
+ description.anchors=anchors
+ end
+ end
+end
+local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1
+local uselessname=(1-bogusname)^0*bogusname
+actions["purge names"]=function(data,filename,raw)
+ if purge_names then
+ local n=0
+ for u,d in next,data.descriptions do
+ if lpegmatch(uselessname,d.name) then
+ n=n+1
+ d.name=nil
+ end
+ end
+ if n>0 then
+ report_otf("%s bogus names removed",n)
+ end
+ end
+end
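+-- "compact lookups" replaces lookup names by small numeric tags in descriptions, lookups,
+-- sequences and anchor mappings; the reverse mapping ends up in resources.lookuptags.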
+actions["compact lookups"]=function(data,filename,raw)
+ if not compact_lookups then
+ report_otf("not compacting")
+ return
+ end
+ local last=0
+ local tags=table.setmetatableindex({},
+ function(t,k)
+ last=last+1
+ t[k]=last
+ return last
+ end
+ )
+ local descriptions=data.descriptions
+ local resources=data.resources
+ for u,d in next,descriptions do
+ local slookups=d.slookups
+ if type(slookups)=="table" then
+ local s={}
+ for k,v in next,slookups do
+ s[tags[k]]=v
+ end
+ d.slookups=s
+ end
+ local mlookups=d.mlookups
+ if type(mlookups)=="table" then
+ local m={}
+ for k,v in next,mlookups do
+ m[tags[k]]=v
+ end
+ d.mlookups=m
+ end
+ local kerns=d.kerns
+ if type(kerns)=="table" then
+ local t={}
+ for k,v in next,kerns do
+ t[tags[k]]=v
+ end
+ d.kerns=t
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local rules=v.rules
+ if rules then
+ for i=1,#rules do
+ local l=rules[i].lookups
+ if type(l)=="table" then
+ for i=1,#l do
+ l[i]=tags[l[i]]
+ end
+ end
+ end
+ end
+ l[tags[k]]=v
+ end
+ data.lookups=l
+ end
+ local lookups=resources.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local s=v.subtables
+ if type(s)=="table" then
+ for i=1,#s do
+ s[i]=tags[s[i]]
+ end
+ end
+ l[tags[k]]=v
+ end
+ resources.lookups=l
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for i=1,#sequences do
+ local s=sequences[i]
+ local n=s.name
+ if n then
+ s.name=tags[n]
+ end
+ local t=s.subtables
+ if type(t)=="table" then
+ for i=1,#t do
+ t[i]=tags[t[i]]
+ end
+ end
+ end
+ end
+ local lookuptypes=resources.lookuptypes
+ if lookuptypes then
+ local l={}
+ for k,v in next,lookuptypes do
+ l[tags[k]]=v
+ end
+ resources.lookuptypes=l
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookups in next,anchor_to_lookup do
+ local l={}
+ for lookup,value in next,lookups do
+ l[tags[lookup]]=value
+ end
+ anchor_to_lookup[anchor]=l
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ local l={}
+ for lookup,value in next,lookup_to_anchor do
+ l[tags[lookup]]=value
+ end
+ resources.lookup_to_anchor=l
+ end
+ tags=table.swapped(tags)
+ report_otf("%s lookup tags compacted",#tags)
+ resources.lookuptags=tags
+end
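+-- Illustration only: a standalone sketch of the renumbering idea used by
+-- "compact lookups" -- long lookup-name strings become small integers the
+-- first time they are seen, and a swapped table keeps the original names
+-- around for tracing. Plain Lua; the sample lookup names are hypothetical.
+local last = 0
+local tags = setmetatable({}, { __index = function(t, k)
+  last = last + 1
+  t[k] = last
+  return last
+end })
+local a, b = tags["ss01 single sub lookup 12"], tags["kern pair lookup 3"]
+local lookuptags = {}
+for name, index in next, tags do lookuptags[index] = name end
+print(a, b, lookuptags[a]) --> 1  2  ss01 single sub lookup 12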
+function otf.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return {}
+ end
+end
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata=data.metadata
+ local warnings=data.warnings
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local mathparameters={}
+ local pfminfo=metadata.pfminfo or {}
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local spaceunits=500
+ local spacer="space"
+ local designsize=metadata.designsize or metadata.design_size or 100
+ local minsize=metadata.minsize or metadata.design_range_bottom or designsize
+ local maxsize=metadata.maxsize or metadata.design_range_top or designsize
+ local mathspecs=metadata.math
+ if designsize==0 then
+ designsize=100
+ minsize=100
+ maxsize=100
+ end
+ if mathspecs then
+ for name,value in next,mathspecs do
+ mathparameters[name]=value
+ end
+ end
+ for unicode,_ in next,data.descriptions do
+ characters[unicode]={}
+ end
+ if mathspecs then
+ for unicode,character in next,characters do
+ local d=descriptions[unicode]
+ local m=d.math
+ if m then
+ local italic=m.italic
+ local vitalic=m.vitalic
+ local variants=m.hvariants
+ local parts=m.hparts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.horiz_variants=parts
+ elseif parts then
+ character.horiz_variants=parts
+ italic=m.hitalic
+ end
+ local variants=m.vvariants
+ local parts=m.vparts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.vert_variants=parts
+ elseif parts then
+ character.vert_variants=parts
+ end
+ if italic and italic~=0 then
+ character.italic=italic
+ end
+ if vitalic and vitalic~=0 then
+ character.vert_italic=vitalic
+ end
+ local accent=m.accent
+ if accent then
+ character.accent=accent
+ end
+ local kerns=m.kerns
+ if kerns then
+ character.mathkerns=kerns
+ end
+ end
+ end
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local psname=metadata.psname or fontname or fullname
+ local units=metadata.units or metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units=1000
+ report_otf("changing %a units to %a",0,units)
+ end
+ local monospaced=metadata.monospaced or metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local charwidth=pfminfo.avgwidth
+ local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ local italicangle=metadata.italicangle
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ local space=0x0020
+ local emdash=0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width/2,"emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits) or 500
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=units/2
+ parameters.space_shrink=1*units/3
+ parameters.x_height=2*units/5
+ parameters.quad=units
+ if spaceunits<2*units/5 then
+ end
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x0078
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ parameters.designsize=(designsize/10)*65536
+ parameters.minsize=(minsize/10)*65536
+ parameters.maxsize=(maxsize/10)*65536
+ parameters.ascender=abs(metadata.ascender or metadata.ascent or 0)
+ parameters.descender=abs(metadata.descender or metadata.descent or 0)
+ parameters.units=units
+ properties.space=spacer
+ properties.encodingbytes=2
+ properties.format=data.format or otf_format(filename) or formats.otf
+ properties.noglyphnames=true
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=psname
+ properties.name=filename or fullname
+ if warnings and #warnings>0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ mathparameters=mathparameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ warnings=warnings,
+ }
+ end
+end
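+-- Illustration only: a minimal sketch of the interword-space fallback chain
+-- in copytotfm -- the width of U+0020, else the em dash width (halved unless
+-- the font is monospaced), else the average character width, else 500 font
+-- units. Plain Lua with hypothetical description tables.
+local function guess_spaceunits(descriptions, monospaced, charwidth)
+  local space, emdash = descriptions[0x0020], descriptions[0x2014]
+  if space and space.width then
+    return space.width, "space"
+  elseif emdash and emdash.width then
+    return monospaced and emdash.width or emdash.width/2, monospaced and "emdash" or "emdash/2"
+  elseif charwidth then
+    return charwidth, "charwidth"
+  end
+  return 500, "fallback"
+end
+print(guess_spaceunits({ [0x2014] = { width = 1000 } }, false)) -- emdash/2 branch: 500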
+local function otftotfm(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name=specification.name
+ local sub=specification.sub
+ local filename=specification.filename
+ local features=specification.features.normal
+ local rawdata=otf.load(filename,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ local descriptions=rawdata.descriptions
+ local duplicates=rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates,nofduplicated=0,0
+ for parent,list in next,duplicates do
+ if type(list)=="table" then
+ local n=#list
+ for i=1,n do
+ local unicode=list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ end
+ nofduplicates=nofduplicates+n
+ else
+ if not descriptions[list] then
+ descriptions[list]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ nofduplicates=nofduplicates+1
+ end
+ end
+ if trace_otf and nofduplicated~=nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
+ rawdata.lookuphash={}
+ tfmdata=copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ local features=constructors.checkedfeatures("otf",features)
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.dynamics={}
+ tfmdata.changed={}
+ shared.features=features
+ shared.processes=otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+local function read_from_otf(specification)
+ local tfmdata=otftotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata.properties.sub=specification.sub
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata=tfmdata.shared.rawdata.metadata.math
+ local mathsize=tonumber(mathsize)
+ if mathdata then
+ local parameters=tfmdata.parameters
+ parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize=mathsize
+ end
+end
+registerotffeature {
+ name="mathsize",
+ description="apply mathsize specified in the font",
+ initializers={
+ base=checkmathsize,
+ node=checkmathsize,
+ }
+}
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local featuremap,featurelist={},{}
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ features=features and features[kind]
+ features=features and (features[script] or features[default] or features[wildcard])
+ features=features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables=sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss=subtables[s]
+ if not featuremap[ss] then
+ featuremap[ss]=true
+ featurelist[#featurelist+1]=ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist>0 then
+ return featuremap,featurelist
+ end
+ end
+ return nil,nil
+end
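+-- Illustration only: a standalone sketch of the script/language fallback used
+-- when collecting lookups -- an exact match wins, otherwise "dflt", otherwise
+-- the wildcard "*". The feature table below is hypothetical.
+local default, wildcard = "dflt", "*"
+local function resolve(features, kind, script, language)
+  local f = features and features[kind]
+  f = f and (f[script]   or f[default] or f[wildcard])
+  f = f and (f[language] or f[default] or f[wildcard])
+  return f
+end
+local features = { liga = { latn = { dflt = true }, ["*"] = { ["*"] = true } } }
+print(resolve(features, "liga", "latn", "NLD"))  --> true (latn/dflt)
+print(resolve(features, "liga", "arab", "ARA"))  --> true (wildcard)
+print(resolve(features, "smcp", "latn", "dflt")) --> nil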
+local function check_otf(forced,specification,suffix)
+ local name=specification.name
+ if forced then
+ name=specification.forcedname
+ end
+ local fullname=findbinfile(name,suffix) or ""
+ if fullname=="" then
+ fullname=fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname~="" and not fonts.names.ignoredfile(fullname) then
+ specification.filename=fullname
+ return read_from_otf(specification)
+ end
+end
+local function opentypereader(specification,suffix)
+ local forced=specification.forced or ""
+ if formats[forced] then
+ return check_otf(true,specification,forced)
+ else
+ return check_otf(false,specification,suffix)
+ end
+end
+readers.opentype=opentypereader
+function readers.otf (specification) return opentypereader(specification,"otf") end
+function readers.ttf (specification) return opentypereader(specification,"ttf") end
+function readers.ttc (specification) return opentypereader(specification,"ttf") end
+function readers.dfont(specification) return opentypereader(specification,"ttf") end
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties=tfmdata.properties
+ return properties.script or "dflt",properties.language or "dflt"
+end
+local function justset(coverage,unicode,replacement)
+ coverage[unicode]=replacement
+end
+otf.coverup={
+ stepkey="subtables",
+ actions={
+ substitution=justset,
+ alternate=justset,
+ multiple=justset,
+ ligature=justset,
+ kern=justset,
+ },
+ register=function(coverage,lookuptype,format,feature,n,descriptions,resources)
+ local name=formatters["ctx_%s_%s"](feature,n)
+ if lookuptype=="kern" then
+ resources.lookuptypes[name]="position"
+ else
+ resources.lookuptypes[name]=lookuptype
+ end
+ for u,c in next,coverage do
+ local description=descriptions[u]
+ local slookups=description.slookups
+ if slookups then
+ slookups[name]=c
+ else
+ description.slookups={ [name]=c }
+ end
+ end
+ return name
+ end
+}
+local function getgsub(tfmdata,k,kind)
+ local description=tfmdata.descriptions[k]
+ if description then
+ local slookups=description.slookups
+ if slookups then
+ local shared=tfmdata.shared
+ local rawdata=shared and shared.rawdata
+ if rawdata then
+ local lookuptypes=rawdata.resources.lookuptypes
+ if lookuptypes then
+ local properties=tfmdata.properties
+ local validlookups,lookuplist=otf.collectlookups(rawdata,kind,properties.script,properties.language)
+ if validlookups then
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local found=slookups[lookup]
+ if found then
+ return found,lookuptypes[lookup]
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+otf.getgsub=getgsub
+function otf.getsubstitution(tfmdata,k,kind,value)
+ local found,kind=getgsub(tfmdata,k,kind)
+ if not found then
+ elseif kind=="substitution" then
+ return found
+ elseif kind=="alternate" then
+ local choice=tonumber(value) or 1
+ return found[choice] or found[1] or k
+ end
+ return k
+end
+otf.getalternate=otf.getsubstitution
+function otf.getmultiple(tfmdata,k,kind)
+ local found,kind=getgsub(tfmdata,k,kind)
+ if found and kind=="multiple" then
+ return found
+ end
+ return { k }
+end
+function otf.getkern(tfmdata,left,right,kind)
+ local kerns=getgsub(tfmdata,left,kind or "kern",true)
+ if kerns then
+ local found=kerns[right]
+ local kind=type(found)
+ if kind=="table" then
+ found=found[1][3]
+ elseif kind~="number" then
+ found=false
+ end
+ if found then
+ return found*tfmdata.parameters.factor
+ end
+ end
+ return 0
+end
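+-- Illustration only: the kern value found in the lookup data is expressed in
+-- font design units; otf.getkern multiplies it by parameters.factor to get a
+-- device value. A rough standalone sketch, assuming the factor is simply
+-- size/units_per_em -- the real factor is set up by constructors.scale, which
+-- is not part of this file.
+local function scalekern(rawkern, size, unitsperem)
+  local factor = size / unitsperem
+  return rawkern * factor
+end
+print(scalekern(-120, 655360, 1000)) -- -120 units at 10pt (655360sp) -> -78643.2sp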
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otb']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget
+local lpegmatch=lpeg.match
+local utfchar=utf.char
+local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
+local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
+local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
+local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
+local report_prepare=logs.reporter("fonts","otf prepare")
+local fonts=fonts
+local otf=fonts.handlers.otf
+local otffeatures=otf.features
+local registerotffeature=otffeatures.register
+otf.defaultbasealternate="none"
+local wildcard="*"
+local default="dflt"
+local formatters=string.formatters
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(descriptions,n)
+ if type(n)=="number" then
+ local name=descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam,j={},{},0
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ j=j+1
+ local di=descriptions[ni]
+ num[j]=f_unicode(ni)
+ nam[j]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
+end
+local function cref(feature,lookuptags,lookupname)
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
+ else
+ return formatters["feature %a"](feature)
+ end
+end
+local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookuptags,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+local basemethods={}
+local basemethod="<unset>"
+local function applybasemethod(what,...)
+ local m=basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
+end
+local basehash,basehashes,applied={},1,{}
+local function registerbasehash(tfmdata)
+ local properties=tfmdata.properties
+ local hash=concat(applied," ")
+ local base=basehash[hash]
+ if not base then
+ basehashes=basehashes+1
+ base=basehashes
+ basehash[hash]=base
+ end
+ properties.basehash=base
+ properties.fullname=properties.fullname.."-"..base
+ applied={}
+end
+local function registerbasefeature(feature,value)
+ applied[#applied+1]=feature.."="..tostring(value)
+end
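+-- Illustration only: a standalone sketch of the basehash bookkeeping above --
+-- every applied feature=value pair is recorded, the joined string is mapped
+-- to a small number, and that number is appended to the fullname so that
+-- differently prepared base-mode instances never share a cache entry. The
+-- local names and the font name are stand-ins, not the module's own.
+local seen, counter, featurelog = {}, 1, {}
+local function logfeature(feature, value)
+  featurelog[#featurelog+1] = feature .. "=" .. tostring(value)
+end
+local function hashedname(fullname)
+  local hash = table.concat(featurelog, " ")
+  local base = seen[hash]
+  if not base then
+    counter = counter + 1
+    base = counter
+    seen[hash] = base
+  end
+  featurelog = {}
+  return fullname .. "-" .. base
+end
+logfeature("liga", true) logfeature("kern", true)
+print(hashedname("DejaVuSerif")) --> DejaVuSerif-2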
+local trace=false
+local function finalize_ligatures(tfmdata,ligatures)
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local private=resources.private
+ local alldone=false
+ while not alldone do
+ local done=0
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ if ligature then
+ local unicode,lookupdata=ligature[1],ligature[2]
+ if trace_ligatures_detail then
+ report_prepare("building % a into %a",lookupdata,unicode)
+ end
+ local size=#lookupdata
+ local firstcode=lookupdata[1]
+ local firstdata=characters[firstcode]
+ local okay=false
+ if firstdata then
+ local firstname="ctx_"..firstcode
+ for i=1,size-1 do
+ local firstdata=characters[firstcode]
+ if not firstdata then
+ firstcode=private
+ if trace_ligatures_detail then
+ report_prepare("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname]=firstcode
+ firstdata={ intermediate=true,ligatures={} }
+ characters[firstcode]=firstdata
+ descriptions[firstcode]={ name=firstname }
+ private=private+1
+ end
+ local target
+ local secondcode=lookupdata[i+1]
+ local secondname=firstname.."_"..secondcode
+ if i==size-1 then
+ target=unicode
+ if not rawget(unicodes,secondname) then
+ unicodes[secondname]=unicode
+ end
+ okay=true
+ else
+ target=rawget(unicodes,secondname)
+ if not target then
+ break
+ end
+ end
+ if trace_ligatures_detail then
+ report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs=firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode]={ char=target }
+ else
+ firstdata.ligatures={ [secondcode]={ char=target } }
+ end
+ firstcode=target
+ firstname=secondname
+ end
+ elseif trace_ligatures_detail then
+ report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
+ end
+ if okay then
+ ligatures[i]=false
+ done=done+1
+ end
+ end
+ end
+ alldone=done==0
+ end
+ if trace_ligatures_detail then
+ for k,v in table.sortedhash(characters) do
+ if v.ligatures then
+ table.print(v,k)
+ end
+ end
+ end
+ resources.private=private
+ return true
+ end
+end
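+-- Illustration only: a standalone sketch of the chain building done in
+-- finalize_ligatures -- a multi-component ligature such as f+f+i is stored as
+-- per-character steps (f followed by f yields an intermediate glyph, the
+-- intermediate followed by i yields the ligature), with intermediates
+-- allocated in a private slot range. The code points and the private offset
+-- below are hypothetical, and reuse of existing intermediates is left out.
+local characters, private = {}, 0xF0000
+local function addligature(components, target)
+  local current = components[1]
+  for i = 2, #components do
+    local step
+    if i == #components then
+      step = target
+    else
+      step = private
+      private = private + 1
+    end
+    local c = characters[current] or {}
+    characters[current] = c
+    c.ligatures = c.ligatures or {}
+    c.ligatures[components[i]] = { char = step }
+    current = step
+  end
+end
+addligature({ 0x66, 0x66, 0x69 }, 0xFB03)    -- f f i -> ffi
+print(characters[0x66].ligatures[0x66].char) --> 983040 (the intermediate slot)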
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local properties=tfmdata.properties
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
+ local ligatures={}
+ local alternate=tonumber(value) or true and 1
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ local actions={
+ substitution=function(lookupdata,lookuptags,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode]=lookupdata
+ end,
+ alternate=function(lookupdata,lookuptags,lookupname,description,unicode)
+ local replacement=lookupdata[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=lookupdata[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=lookupdata[#lookupdata]
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature=function(lookupdata,lookuptags,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1]={ unicode,lookupdata }
+ end,
+ }
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local lookups=description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookups[lookupname]
+ if lookupdata then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ action(lookupdata,lookuptags,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookuplist=lookups[lookupname]
+ if lookuplist then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookuptags,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ properties.hasligatures=finalize_ligatures(tfmdata,ligatures)
+end
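+-- Illustration only: a small sketch of the alternate selection policy used by
+-- both base-mode preparers -- a numeric feature value picks that alternate,
+-- anything else means the first one, and when the requested slot is missing
+-- otf.defaultbasealternate decides between keeping the glyph, the first or
+-- the last alternate. The glyph codes below are hypothetical.
+local function pickalternate(alternates, value, defaultalt)
+  local n = tonumber(value) or 1
+  local replacement = alternates[n]
+  if replacement then
+    return replacement
+  elseif defaultalt == "first" then
+    return alternates[1]
+  elseif defaultalt == "last" then
+    return alternates[#alternates]
+  end
+  return nil -- "none": leave the character alone
+end
+print(pickalternate({ 0xE001, 0xE002 }, 5, "last")) --> 57346 (0xE002)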
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local properties=tfmdata.properties
+ local lookuptags=resources.lookuptags
+ local sharedkerns={}
+ local traceindeed=trace_baseinit and trace_kerns
+ local haskerns=false
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local rawkerns=description.kerns
+ if rawkerns then
+ local s=sharedkerns[rawkerns]
+ if s==false then
+ elseif s then
+ character.kerns=s
+ else
+ local newkerns=character.kerns
+ local done=false
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local kerns=rawkerns[lookup]
+ if kerns then
+ for otherunicode,value in next,kerns do
+ if value==0 then
+ elseif not newkerns then
+ newkerns={ [otherunicode]=value }
+ done=true
+ if traceindeed then
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then
+ newkerns[otherunicode]=value
+ done=true
+ if traceindeed then
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns]=newkerns
+ character.kerns=newkerns
+ haskerns=true
+ else
+ sharedkerns[rawkerns]=false
+ end
+ end
+ end
+ end
+ properties.haskerns=haskerns
+end
+basemethods.independent={
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+local function makefake(tfmdata,name,present)
+ local resources=tfmdata.resources
+ local private=resources.private
+ local character={ intermediate=true,ligatures={} }
+ resources.unicodes[name]=private
+ tfmdata.characters[private]=character
+ tfmdata.descriptions[private]={ name=name }
+ resources.private=private+1
+ present[name]=private
+ return character
+end
+local function make_1(present,tree,name)
+ for k,v in next,tree do
+ if k=="ligature" then
+ present[name]=v
+ else
+ make_1(present,v,name.."_"..k)
+ end
+ end
+end
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
+ for k,v in next,tree do
+ if k=="ligature" then
+ local character=characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
+ end
+ character=makefake(tfmdata,name,present)
+ end
+ local ligatures=character.ligatures
+ if ligatures then
+ ligatures[unicode]={ char=v }
+ else
+ character.ligatures={ [unicode]={ char=v } }
+ end
+ if done then
+ local d=done[lookupname]
+ if not d then
+ done[lookupname]={ "dummy",v }
+ else
+ d[#d+1]=v
+ end
+ end
+ else
+ local code=present[name] or unicode
+ local name=name.."_"..k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
+ end
+ end
+end
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
+ local ligatures={}
+ local alternate=tonumber(value) or true and 1
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ local lookuptype=lookuptypes[lookupname]
+ for unicode,data in next,lookupdata do
+ if lookuptype=="substitution" then
+ if trace_singles then
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode]=data
+ elseif lookuptype=="alternate" then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype=="ligature" then
+ ligatures[#ligatures+1]={ unicode,data,lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local present={}
+ local done=trace_baseinit and trace_ligatures and {}
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree=ligature[1],ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
+ end
+ end
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local properties=tfmdata.properties
+ local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
+ local traceindeed=trace_baseinit and trace_kerns
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ for unicode,data in next,lookupdata do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
+ end
+ end
+end
+local function initializehashes(tfmdata)
+ nodeinitializers.features(tfmdata)
+end
+basemethods.shared={
+ initializehashes=initializehashes,
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+basemethod="independent"
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local starttime=trace_preparing and os.clock()
+ local features=tfmdata.shared.features
+ local fullname=tfmdata.properties.fullname or "?"
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups=otf.collectlookups
+ local rawdata=tfmdata.shared.rawdata
+ local properties=tfmdata.properties
+ local script=properties.script
+ local language=properties.language
+ local basesubstitutions=rawdata.resources.features.gsub
+ local basepositionings=rawdata.resources.features.gpos
+ if basesubstitutions or basepositionings then
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local sfeatures=sequence.features
+ if sfeatures then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local feature=order[i]
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ base=featuresinitializer,
+ }
+}
+directives.register("fonts.otf.loader.basemethod",function(v)
+ if basemethods[v] then
+ basemethod=v
+ end
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-inj']={
+ version=1.001,
+ comment="companion to font-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+if not nodes.properties then return end
+local next,rawget=next,rawget
+local utfchar=utf.char
+local fastcopy=table.fastcopy
+local trace_injections=false trackers.register("fonts.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("fonts","injections")
+local attributes,nodes,node=attributes,nodes,node
+fonts=fonts
+local fontdata=fonts.hashes.identifiers
+nodes.injections=nodes.injections or {}
+local injections=nodes.injections
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
+local kern_code=nodecodes.kern
+local nuts=nodes.nuts
+local nodepool=nuts.pool
+local newkern=nodepool.kern
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local setfield=nuts.setfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local traverse_id=nuts.traverse_id
+local insert_node_before=nuts.insert_before
+local insert_node_after=nuts.insert_after
+local find_tail=nuts.tail
+local properties=nodes.properties.data
+function injections.installnewkern(nk)
+ newkern=nk or newkern
+end
+local nofregisteredkerns=0
+local nofregisteredpairs=0
+local nofregisteredmarks=0
+local nofregisteredcursives=0
+local keepregisteredcounts=false
+function injections.keepcounts()
+ keepregisteredcounts=true
+end
+function injections.resetcounts()
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ keepregisteredcounts=false
+end
+function injections.reset(n)
+ local p=rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections=nil
+ end
+end
+function injections.copy(target,source)
+ local sp=rawget(properties,source)
+ if sp then
+ local tp=rawget(properties,target)
+ local si=rawget(sp,"injections")
+ if si then
+ si=fastcopy(si)
+ if tp then
+ tp.injections=si
+ else
+ properties[target]={
+ injections=si,
+ }
+ end
+ else
+ if tp then
+ tp.injections=nil
+ end
+ end
+ end
+end
+function injections.setligaindex(n,index)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.ligaindex=index
+ else
+ p.injections={
+ ligaindex=index
+ }
+ end
+ else
+ properties[n]={
+ injections={
+ ligaindex=index
+ }
+ }
+ end
+end
+function injections.getligaindex(n,default)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
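+-- Illustration only: a standalone sketch of the per-node property pattern
+-- used throughout this closure -- injection data lives in a shared table
+-- keyed by the node itself, and rawget avoids waking up any metamethods on
+-- that table. Plain Lua tables stand in for nodes, and nodeprops stands in
+-- for nodes.properties.data.
+local nodeprops = {}
+local function setinjection(n, key, value)
+  local p = rawget(nodeprops, n)
+  if not p then p = {} nodeprops[n] = p end
+  local i = rawget(p, "injections")
+  if not i then i = {} p.injections = i end
+  i[key] = value
+end
+local function getinjection(n, key, default)
+  local p = rawget(nodeprops, n)
+  local i = p and rawget(p, "injections")
+  return i and i[key] or default
+end
+local glyphnode = {}                -- stand-in for a glyph node
+setinjection(glyphnode, "ligaindex", 2)
+print(getinjection(glyphnode, "ligaindex", 0)) --> 2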
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx=factor*(exit[1]-entry[1])
+ local dy=-factor*(exit[2]-entry[2])
+ local ws=tfmstart.width
+ local wn=tfmnext.width
+ nofregisteredcursives=nofregisteredcursives+1
+ if rlmode<0 then
+ dx=-(dx+wn)
+ else
+ dx=dx-ws
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursiveanchor=true
+ else
+ p.injections={
+ cursiveanchor=true,
+ }
+ end
+ else
+ properties[start]={
+ injections={
+ cursiveanchor=true,
+ },
+ }
+ end
+ local p=rawget(properties,nxt)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursivex=dx
+ i.cursivey=dy
+ else
+ p.injections={
+ cursivex=dx,
+ cursivey=dy,
+ }
+ end
+ else
+ properties[nxt]={
+ injections={
+ cursivex=dx,
+ cursivey=dy,
+ },
+ }
+ end
+ return dx,dy,nofregisteredcursives
+end
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection)
+ local x=factor*spec[1]
+ local y=factor*spec[2]
+ local w=factor*spec[3]
+ local h=factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local yoffset=y-h
+ local leftkern=x
+ local rightkern=w-x
+ if leftkern~=0 or rightkern~=0 or yoffset~=0 then
+ nofregisteredpairs=nofregisteredpairs+1
+ if rlmode and rlmode<0 then
+ leftkern,rightkern=rightkern,leftkern
+ end
+ if not injection then
+ injection="injections"
+ end
+ local p=rawget(properties,current)
+ if p then
+ local i=rawget(p,injection)
+ if i then
+ if leftkern~=0 then
+ i.leftkern=(i.leftkern or 0)+leftkern
+ end
+ if rightkern~=0 then
+ i.rightkern=(i.rightkern or 0)+rightkern
+ end
+ if yoffset~=0 then
+ i.yoffset=(i.yoffset or 0)+yoffset
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ p[injection]={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ }
+ else
+ p[injection]={
+ yoffset=yoffset,
+ }
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ properties[current]={
+ [injection]={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ },
+ }
+ else
+ properties[current]={
+ [injection]={
+ yoffset=yoffset,
+ },
+ }
+ end
+ return x,y,w,h,nofregisteredpairs
+ end
+ end
+ return x,y,w,h
+end
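+-- Illustration only: a standalone sketch of how setpair turns a value record
+-- { x, y, w, h } into node-level corrections: a vertical offset y-h, a kern
+-- before the glyph (x) and a kern after it (w-x), with the two kerns swapped
+-- in right-to-left runs. The numbers are hypothetical design units.
+local function pairtokerns(factor, rlmode, spec)
+  local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+  local yoffset, leftkern, rightkern = y - h, x, w - x
+  if rlmode and rlmode < 0 then
+    leftkern, rightkern = rightkern, leftkern
+  end
+  return leftkern, rightkern, yoffset
+end
+print(pairtokerns(1,  1, { 50, 0, 120, 0 })) --> 50  70  0
+print(pairtokerns(1, -1, { 50, 0, 120, 0 })) --> 70  50  0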
+function injections.setkern(current,factor,rlmode,x,injection)
+ local dx=factor*x
+ if dx~=0 then
+ nofregisteredkerns=nofregisteredkerns+1
+ local p=rawget(properties,current)
+ if not injection then
+ injection="injections"
+ end
+ if p then
+ local i=rawget(p,injection)
+ if i then
+ i.leftkern=dx+(i.leftkern or 0)
+ else
+ p[injection]={
+ leftkern=dx,
+ }
+ end
+ else
+ properties[current]={
+ [injection]={
+ leftkern=dx,
+ },
+ }
+ end
+ return dx,nofregisteredkerns
+ else
+ return 0,0
+ end
+end
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase,mkmk)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ nofregisteredmarks=nofregisteredmarks+1
+ if rlmode>=0 then
+ dx=tfmbase.width-dx
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ if i.markmark then
+ else
+ i.markx=dx
+ i.marky=dy
+ i.markdir=rlmode or 0
+ i.markbase=nofregisteredmarks
+ i.markbasenode=base
+ i.markmark=mkmk
+ end
+ else
+ p.injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ markmark=mkmk,
+ }
+ end
+ else
+ properties[start]={
+ injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ markmark=mkmk,
+ },
+ }
+ end
+ return dx,dy,nofregisteredmarks
+end
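+-- Illustration only: a rough sketch of the placement arithmetic in setmark --
+-- the offset is the difference between the base anchor and the mark anchor,
+-- scaled by the font factor, and in left-to-right runs the horizontal offset
+-- is taken relative to the base width. All numbers are hypothetical design
+-- units with factor 1.
+local function markoffset(factor, rlmode, basewidth, baseanchor, markanchor)
+  local dx = factor * (baseanchor[1] - markanchor[1])
+  local dy = factor * (baseanchor[2] - markanchor[2])
+  if rlmode >= 0 then
+    dx = basewidth - dx
+  end
+  return dx, dy
+end
+print(markoffset(1, 1, 500, { 250, 600 }, { 100, 0 })) --> 350  600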
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+local function showchar(n,nested)
+ local char=getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+local function show(n,what,nested,symbol)
+ if n then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,what)
+ if i then
+ local leftkern=i.leftkern or 0
+ local rightkern=i.rightkern or 0
+ local yoffset=i.yoffset or 0
+ local markx=i.markx or 0
+ local marky=i.marky or 0
+ local markdir=i.markdir or 0
+ local markbase=i.markbase or 0
+ local cursivex=i.cursivex or 0
+ local cursivey=i.cursivey or 0
+ local ligaindex=i.ligaindex or 0
+ local margin=nested and 4 or 2
+ if rightkern~=0 or yoffset~=0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern~=0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx~=0 or marky~=0 or markbase~=0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase~=0 and "yes" or "no")
+ end
+ if cursivex~=0 or cursivey~=0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex~=0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n=head
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id==disc_code then
+ local pre=getfield(n,"pre")
+ local post=getfield(n,"post")
+ local replace=getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
+ end
+ if post then
+ showsub(post,"postinjections","post")
+ end
+ if replace then
+ showsub(replace,"replaceinjections","replace")
+ end
+ end
+ n=getnext(n)
+ end
+ report_injections("end run")
+end
+local function show_result(head)
+ local current=head
+ local skipping=false
+ while current do
+ local id=getid(current)
+ if id==glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ skipping=false
+ elseif id==kern_code then
+ report_injections("kern: %p",getfield(current,"kern"))
+ skipping=false
+ elseif not skipping then
+ report_injections()
+ skipping=true
+ end
+ current=getnext(current)
+ end
+end
+local function collect_glyphs(head,offsets)
+ local glyphs,glyphi,nofglyphs={},{},0
+ local marks,marki,nofmarks={},{},0
+ local nf,tm=nil,nil
+ local n=head
+ local function identify(n,what)
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources
+ if tm then
+ tm=tm.marks
+ end
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=n
+ marki[nofmarks]="injections"
+ else
+ nofglyphs=nofglyphs+1
+ glyphs[nofglyphs]=n
+ glyphi[nofglyphs]=what
+ end
+ if offsets then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,what)
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ identify(n,"injections")
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ identify(n,"preinjections")
+ end
+ end
+ end
+ local d=getfield(n,"post")
+ if d then
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ identify(n,"postinjections")
+ end
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ identify(n,"replaceinjections")
+ end
+ end
+ end
+ end
+ n=getnext(n)
+ end
+ return glyphs,glyphi,nofglyphs,marks,marki,nofmarks
+end
+local function inject_marks(marks,marki,nofmarks)
+ for i=1,nofmarks do
+ local n=marks[i]
+ local pn=rawget(properties,n)
+ if pn then
+ local ni=marki[i]
+ local pn=rawget(pn,ni)
+ if pn then
+ local p=pn.markbasenode
+ if p then
+ local px=getfield(p,"xoffset")
+ local ox=0
+ local rightkern=nil
+ local pp=rawget(properties,p)
+ if pp then
+ pp=rawget(pp,ni)
+ if pp then
+ rightkern=pp.rightkern
+ end
+ end
+ if rightkern then
+ if pn.markdir<0 then
+ ox=px-pn.markx-rightkern
+ else
+
+
+ if false then
+ local leftkern=pp.leftkern
+ if leftkern then
+ ox=px-pn.markx-leftkern
+ else
+ ox=px-pn.markx
+ end
+ else
+ ox=px-pn.markx
+ end
+ end
+ else
+ ox=px-pn.markx
+ local wn=getfield(n,"width")
+ if wn~=0 then
+ pn.leftkern=-wn/2
+ pn.rightkern=-wn/2
+ end
+ end
+ setfield(n,"xoffset",ox)
+ local py=getfield(p,"yoffset")
+ local oy=getfield(n,"yoffset")+py+pn.marky
+ setfield(n,"yoffset",oy)
+ else
+ end
+ end
+ end
+ end
+end
+local function inject_cursives(glyphs,glyphi,nofglyphs)
+ local cursiveanchor,lastanchor=nil,nil
+ local minc,maxc,last=0,0,nil
+ for i=1,nofglyphs do
+ local n=glyphs[i]
+ local pn=rawget(properties,n)
+ if pn then
+ pn=rawget(pn,glyphi[i])
+ end
+ if pn then
+ local cursivex=pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex~=0 then
+ pn.leftkern=(pn.leftkern or 0)+cursivex
+ end
+ if lastanchor then
+ if maxc==0 then
+ minc=lastanchor
+ end
+ maxc=lastanchor
+ properties[cursiveanchor].cursivedy=pn.cursivey
+ end
+ last=n
+ else
+ maxc=0
+ end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ maxc=0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor=n
+ lastanchor=i
+ else
+ cursiveanchor=nil
+ lastanchor=nil
+ if maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ maxc=0
+ end
+ end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
+ end
+ maxc=0
+ cursiveanchor=nil
+ lastanchor=nil
+ end
+ end
+ if last and maxc>0 then
+ local ny=getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ end
+end
+local function inject_kerns(head,glist,ilist,length)
+ for i=1,length do
+ local n=glist[i]
+ local pn=rawget(properties,n)
+ if pn then
+ local dp=nil
+ local dr=nil
+ local ni=ilist[i]
+ local p=nil
+ if ni=="injections" then
+ p=getprev(n)
+ if p then
+ local id=getid(p)
+ if id==disc_code then
+ dp=getfield(p,"post")
+ dr=getfield(p,"replace")
+ end
+ end
+ end
+ if dp then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(dp)
+ insert_node_after(dp,t,newkern(leftkern))
+ setfield(p,"post",dp)
+ end
+ end
+ end
+ if dr then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(dr)
+ insert_node_after(dr,t,newkern(leftkern))
+ setfield(p,"replace",dr)
+ end
+ end
+ else
+ local i=rawget(pn,ni)
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ end
+ end
+ end
+ end
+ end
+end
+local function inject_everything(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs,glyphi,nofglyphs,marks,marki,nofmarks=collect_glyphs(head,nofregisteredpairs>0)
+ if nofglyphs>0 then
+ if nofregisteredcursives>0 then
+ inject_cursives(glyphs,glyphi,nofglyphs)
+ end
+ if nofregisteredmarks>0 then
+ inject_marks(marks,marki,nofmarks)
+ end
+ inject_kerns(head,glyphs,glyphi,nofglyphs)
+ end
+ if nofmarks>0 then
+ inject_kerns(head,marks,marki,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ end
+ return tonode(head),true
+end
+local function inject_kerns_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"post",d)
+ end
+ end
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"replace",d)
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ head=insert_node_before(head,n,newkern(leftkern))
+ end
+ end
+ end
+ end
+ end
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"preinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"post",h)
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"replace",h)
+ end
+ end
+ p=n
+ else
+ p=nil
+ end
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
+local function inject_pairs_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"post",d)
+ end
+ end
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ setfield(p,"replace",d)
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ head=insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"preinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"post",h)
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"replace",h)
+ end
+ end
+ p=n
+ else
+ p=nil
+ end
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredpairs=0
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
+function injections.handler(head,where)
+ if nofregisteredmarks>0 or nofregisteredcursives>0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs>0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns>0 then
+ return inject_kerns_only(head,where)
+ else
+ return head,false
+ end
+end
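+-- Illustration only: the handler above picks the cheapest injection pass for
+-- what was actually registered during the positioning run. A standalone
+-- sketch of that decision, with strings standing in for the real passes:
+local function choosepass(marks, cursives, pairs_, kerns)
+  if marks > 0 or cursives > 0 then
+    return "inject_everything"
+  elseif pairs_ > 0 then
+    return "inject_pairs_only"
+  elseif kerns > 0 then
+    return "inject_kerns_only"
+  else
+    return "untouched"
+  end
+end
+print(choosepass(0, 0, 3, 10)) --> inject_pairs_only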
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otx']={
+ version=1.001,
+ comment="companion to font-otf.lua (analysing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type=type
+if not trackers then trackers={ register=function() end } end
+local fonts,nodes,node=fonts,nodes,node
+local allocate=utilities.storage.allocate
+local otf=fonts.handlers.otf
+local analyzers=fonts.analyzers
+local initializers=allocate()
+local methods=allocate()
+analyzers.initializers=initializers
+analyzers.methods=methods
+analyzers.useunicodemarks=false
+local a_state=attributes.private('state')
+local nuts=nodes.nuts
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getprop=nuts.getprop
+local setprop=nuts.setprop
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local traverse_id=nuts.traverse_id
+local traverse_node_list=nuts.traverse
+local end_of_math=nuts.end_of_math
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
+local math_code=nodecodes.math
+local fontdata=fonts.hashes.identifiers
+local categories=characters and characters.categories or {}
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local s_init=1 local s_rphf=7
+local s_medi=2 local s_half=8
+local s_fina=3 local s_pref=9
+local s_isol=4 local s_blwf=10
+local s_mark=5 local s_pstf=11
+local s_rest=6
+local states={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ mark=s_mark,
+ rest=s_rest,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+local features={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+analyzers.states=states
+analyzers.features=features
+function analyzers.setstate(head,font)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local descriptions=tfmdata.descriptions
+ local first,last,current,n,done=nil,nil,head,0,false
+ current=tonut(current)
+ while current do
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font then
+ done=true
+ local char=getchar(current)
+ local d=descriptions[char]
+ if d then
+ if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
+ done=true
+ setprop(current,a_state,s_mark)
+ elseif n==0 then
+ first,last,n=current,current,1
+ setprop(current,a_state,s_init)
+ else
+ last,n=current,n+1
+ setprop(current,a_state,s_medi)
+ end
+ else
+ if first and first==last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first,last,n=nil,nil,0
+ end
+ elseif id==disc_code then
+ setprop(current,a_state,s_medi)
+ last=current
+ else
+ if first and first==last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first,last,n=nil,nil,0
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=getnext(current)
+ end
+ if first and first==last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ return head,done
+end
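+-- Illustration only: a much simplified standalone sketch of the run analysis
+-- in analyzers.setstate -- the first joining letter of a run is tagged init,
+-- later ones medi, and when the run ends the last one is retagged fina, or
+-- isol if the run was a single letter. Marks and math skipping are left out.
+local function tagrun(letters)           -- letters: array of booleans (joining or not)
+  local states, first, last, n = {}, nil, nil, 0
+  local function flush()
+    if first and first == last then states[last] = "isol"
+    elseif last then states[last] = "fina" end
+    first, last, n = nil, nil, 0
+  end
+  for i = 1, #letters do
+    if letters[i] then
+      if n == 0 then first, last, n, states[i] = i, i, 1, "init"
+      else last, n, states[i] = i, n + 1, "medi" end
+    else
+      flush()
+    end
+  end
+  flush()
+  return states
+end
+local s = tagrun { true, true, true, false, true }
+print(s[1], s[3], s[5]) --> init  fina  isol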
+local function analyzeinitializer(tfmdata,value)
+ local script,language=otf.scriptandlanguage(tfmdata)
+ local action=initializers[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(tfmdata,value)
+ else
+ local action=action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+local function analyzeprocessor(head,font,attr)
+ local tfmdata=fontdata[font]
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ local action=methods[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(head,font,attr)
+ else
+ action=action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head,false
+end
+registerotffeature {
+ name="analyze",
+ description="analysis of character classes",
+ default=true,
+ initializers={
+ node=analyzeinitializer,
+ },
+ processors={
+ position=1,
+ node=analyzeprocessor,
+ }
+}
+methods.latn=analyzers.setstate
+local tatweel=0x0640
+local zwnj=0x200C
+local zwj=0x200D
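+-- Joining classes used by the Arabic-style analyzer below: isolated (non-joining), final (joining only with the preceding glyph) and medial (dual-joining) codepoints.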
+local isolated={
+ [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
+ [0x0604]=true,
+ [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
+ [0x06DD]=true,
+ [0x0856]=true,[0x0858]=true,[0x0857]=true,
+ [0x07FA]=true,
+ [zwnj]=true,
+ [0x08AD]=true,
+}
+local final={
+ [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
+ [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
+ [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
+ [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
+ [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
+ [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
+ [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
+ [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
+ [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
+ [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
+ [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
+ [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
+ [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
+ [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
+ [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
+ [0x0778]=true,[0x0779]=true,
+ [0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
+ [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
+ [0x072F]=true,[0x074D]=true,
+ [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
+ [0x084F]=true,
+ [0x08AE]=true,[0x08B1]=true,[0x08B2]=true,
+}
+local medial={
+ [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
+ [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
+ [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
+ [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
+ [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
+ [0x0641]=true,[0x0642]=true,[0x0643]=true,
+ [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
+ [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
+ [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
+ [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
+ [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
+ [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
+ [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
+ [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
+ [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
+ [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
+ [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
+ [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
+ [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
+ [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
+ [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
+ [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
+ [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
+ [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
+ [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
+ [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
+ [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
+ [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
+ [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
+ [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
+ [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
+ [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
+ [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
+ [0x077E]=true,[0x077F]=true,
+ [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true,
+ [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true,
+ [0x08A7]=true,[0x08A3]=true,
+ [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
+ [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
+ [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
+ [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
+ [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true,
+ [0x074E]=true,[0x074F]=true,
+ [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true,
+ [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true,
+ [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true,
+ [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true,
+ [0x0853]=true,
+ [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true,
+ [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true,
+ [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true,
+ [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true,
+ [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true,
+ [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true,
+ [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
+ [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
+ [0x07E6]=true,
+ [tatweel]=true,[zwj]=true,
+ [0x08A1]=true,[0x08AF]=true,[0x08B0]=true,
+}
+local arab_warned={}
+local function warning(current,what)
+ local char=getchar(current)
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char]=true
+ end
+end
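+-- Close the current run: a single pending glyph becomes isol, otherwise the last glyph becomes fina; glyphs outside the classes trigger a one-time warning.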
+local function finish(first,last)
+ if last then
+ if first==last then
+ local fc=getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ else
+ local lc=getchar(last)
+ if medial[lc] or final[lc] then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ end
+ first,last=nil,nil
+ elseif first then
+ local fc=getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first=nil
+ end
+ return first,last
+end
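+-- Arabic analyzer: assigns isol/init/medi/fina/mark states from the class tables above; also reused for Syriac, Mandaic and N'Ko via the aliases after this function.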
+function methods.arab(head,font,attr)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local marks=tfmdata.resources.marks
+ local first,last,current,done=nil,nil,head,false
+ current=tonut(current)
+ while current do
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then
+ done=true
+ local char=getchar(current)
+ if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ setprop(current,a_state,s_mark)
+ elseif isolated[char] then
+ first,last=finish(first,last)
+ setprop(current,a_state,s_isol)
+ first,last=nil,nil
+ elseif not first then
+ if medial[char] then
+ setprop(current,a_state,s_init)
+ first,last=first or current,current
+ elseif final[char] then
+ setprop(current,a_state,s_isol)
+ first,last=nil,nil
+ else
+ first,last=finish(first,last)
+ end
+ elseif medial[char] then
+ first,last=first or current,current
+ setprop(current,a_state,s_medi)
+ elseif final[char] then
+ if getprop(last,a_state)~=s_init then
+ setprop(last,a_state,s_medi)
+ end
+ setprop(current,a_state,s_fina)
+ first,last=nil,nil
+ elseif char>=0x0600 and char<=0x06FF then
+ setprop(current,a_state,s_rest)
+ first,last=finish(first,last)
+ else
+ first,last=finish(first,last)
+ end
+ else
+ if first or last then
+ first,last=finish(first,last)
+ end
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=getnext(current)
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head,done
+end
+methods.syrc=methods.arab
+methods.mand=methods.arab
+methods.nko=methods.arab
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks=v
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otn']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local type,next,tonumber=type,next,tonumber
+local random=math.random
+local formatters=string.formatters
+local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
+local registertracker=trackers.register
+local registerdirective=directives.register
+local fonts=fonts
+local otf=fonts.handlers.otf
+local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end)
+local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end)
+local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end)
+local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end)
+local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end)
+local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end)
+local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end)
+local trace_details=false registertracker("otf.details",function(v) trace_details=v end)
+local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end)
+local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end)
+local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end)
+local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end)
+local trace_kernruns=false registertracker("otf.kernruns",function(v) trace_kernruns=v end)
+local trace_discruns=false registertracker("otf.discruns",function(v) trace_discruns=v end)
+local trace_compruns=false registertracker("otf.compruns",function(v) trace_compruns=v end)
+local quit_on_no_replacement=true
+local zwnjruns=true
+registerdirective("otf.zwnjruns",function(v) zwnjruns=v end)
+registerdirective("otf.chain.quitonnoreplacement",function(value) quit_on_no_replacement=value end)
+local report_direct=logs.reporter("fonts","otf direct")
+local report_subchain=logs.reporter("fonts","otf subchain")
+local report_chain=logs.reporter("fonts","otf chain")
+local report_process=logs.reporter("fonts","otf process")
+local report_prepare=logs.reporter("fonts","otf prepare")
+local report_warning=logs.reporter("fonts","otf warning")
+local report_run=logs.reporter("fonts","otf run")
+registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+local nuts=nodes.nuts
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local setfield=nuts.setfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getattr=nuts.getattr
+local setattr=nuts.setattr
+local getprop=nuts.getprop
+local setprop=nuts.setprop
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local insert_node_before=nuts.insert_before
+local insert_node_after=nuts.insert_after
+local delete_node=nuts.delete
+local remove_node=nuts.remove
+local copy_node=nuts.copy
+local copy_node_list=nuts.copy_list
+local find_node_tail=nuts.tail
+local flush_node_list=nuts.flush_list
+local free_node=nuts.free
+local end_of_math=nuts.end_of_math
+local traverse_nodes=nuts.traverse
+local traverse_id=nuts.traverse_id
+local setmetatableindex=table.setmetatableindex
+local zwnj=0x200C
+local zwj=0x200D
+local wildcard="*"
+local default="dflt"
+local nodecodes=nodes.nodecodes
+local whatcodes=nodes.whatcodes
+local glyphcodes=nodes.glyphcodes
+local disccodes=nodes.disccodes
+local glyph_code=nodecodes.glyph
+local glue_code=nodecodes.glue
+local disc_code=nodecodes.disc
+local math_code=nodecodes.math
+local dir_code=whatcodes.dir
+local localpar_code=whatcodes.localpar
+local discretionary_code=disccodes.discretionary
+local ligature_code=glyphcodes.ligature
+local privateattribute=attributes.private
+local a_state=privateattribute('state')
+local a_cursbase=privateattribute('cursbase')
+local injections=nodes.injections
+local setmark=injections.setmark
+local setcursive=injections.setcursive
+local setkern=injections.setkern
+local setpair=injections.setpair
+local resetinjection=injections.reset
+local copyinjection=injections.copy
+local setligaindex=injections.setligaindex
+local getligaindex=injections.getligaindex
+local cursonce=true
+local fonthashes=fonts.hashes
+local fontdata=fonthashes.identifiers
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local onetimemessage=fonts.loggers.onetimemessage or function() end
+otf.defaultnodealternate="none"
+local tfmdata=false
+local characters=false
+local descriptions=false
+local resources=false
+local marks=false
+local currentfont=false
+local lookuptable=false
+local anchorlookups=false
+local lookuptypes=false
+local lookuptags=false
+local handlers={}
+local rlmode=0
+local featurevalue=false
+local sweephead={}
+local sweepnode=nil
+local sweepprev=nil
+local sweepnext=nil
+local notmatchpre={}
+local notmatchpost={}
+local notmatchreplace={}
+local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+local function logwarning(...)
+ report_direct(...)
+end
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(n)
+ if type(n)=="number" then
+ local description=descriptions[n]
+ local name=description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+local function cref(kind,chainname,chainlookupname,lookupname,index)
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
+end
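+-- Copy a glyph node without duplicating its components list, carrying injection data along.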
+local function copy_glyph(g)
+ local components=getfield(g,"components")
+ if components then
+ setfield(g,"components",nil)
+ local n=copy_node(g)
+ copyinjection(n,g)
+ setfield(g,"components",components)
+ return n
+ else
+ local n=copy_node(g)
+ copyinjection(n,g)
+ return n
+ end
+end
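+-- Remove a discretionary node and splice its replace text back into the main list.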
+local function flattendisk(head,disc)
+ local replace=getfield(disc,"replace")
+ setfield(disc,"replace",nil)
+ free_node(disc)
+ if head==disc then
+ local next=getnext(disc)
+ if replace then
+ if next then
+ local tail=find_node_tail(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ return replace,replace
+ elseif next then
+ return next,next
+ else
+ return
+ end
+ else
+ local next=getnext(disc)
+ local prev=getprev(disc)
+ if replace then
+ local tail=find_node_tail(replace)
+ if next then
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ return head,replace
+ else
+ if next then
+ setfield(next,"prev",prev)
+ end
+ setfield(prev,"next",next)
+ return head,next
+ end
+ end
+end
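+-- Append the given list to the post field of a disc node and a copy of it to the replace field.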
+local function appenddisc(disc,list)
+ local post=getfield(disc,"post")
+ local replace=getfield(disc,"replace")
+ local phead=list
+ local rhead=copy_node_list(list)
+ local ptail=find_node_tail(post)
+ local rtail=find_node_tail(replace)
+ if post then
+ setfield(ptail,"next",phead)
+ setfield(phead,"prev",ptail)
+ else
+ setfield(disc,"post",phead)
+ end
+ if replace then
+ setfield(rtail,"next",rhead)
+ setfield(rhead,"prev",rtail)
+ else
+ setfield(disc,"replace",rhead)
+ end
+end
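+-- Collapse the glyphs from start to stop into one ligature-subtype glyph, keeping the originals as components.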
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start==stop and getchar(start)==char then
+ return head,start
+ else
+ local prev=getprev(start)
+ local next=getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base=copy_glyph(start)
+ if head==start then
+ head=base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ return head,base
+ end
+end
+local function getcomponentindex(start)
+ if getid(start)~=glyph_code then
+ return 0
+ elseif getsubtype(start)==ligature_code then
+ local i=0
+ local components=getfield(start,"components")
+ while components do
+ i=i+getcomponentindex(components)
+ components=getnext(components)
+ end
+ return i
+ elseif not marks[getchar(start)] then
+ return 1
+ else
+ return 0
+ end
+end
+local a_noligature=attributes.private("noligature")
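+-- Build a ligature for start..stop: the new glyph keeps the originals as components, marks are re-indexed or dropped, and a pending discretionary is folded into the result.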
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
+ if getattr(start,a_noligature)==1 then
+ return head,start
+ end
+ if start==stop and getchar(start)==char then
+ resetinjection(start)
+ setfield(start,"char",char)
+ return head,start
+ end
+ local components=getfield(start,"components")
+ if components then
+ end
+ local prev=getprev(start)
+ local next=getnext(stop)
+ local comp=start
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base=copy_glyph(start)
+ if start==head then
+ head=base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",comp)
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"prev",prev)
+ setfield(base,"next",next)
+ if not discfound then
+ local deletemarks=markflag~="mark"
+ local components=start
+ local baseindex=0
+ local componentindex=0
+ local head=base
+ local current=base
+ while start do
+ local char=getchar(start)
+ if not marks[char] then
+ baseindex=baseindex+componentindex
+ componentindex=getcomponentindex(start)
+ elseif not deletemarks then
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ local n=copy_node(start)
+ copyinjection(n,start)
+ head,current=insert_node_after(head,current,n)
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start=getnext(start)
+ end
+ local start=getnext(current)
+ while start and getid(start)==glyph_code do
+ local char=getchar(start)
+ if marks[char] then
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ else
+ break
+ end
+ start=getnext(start)
+ end
+ else
+ local discprev=getfield(discfound,"prev")
+ local discnext=getfield(discfound,"next")
+ if discprev and discnext then
+ local pre=getfield(discfound,"pre")
+ local post=getfield(discfound,"post")
+ local replace=getfield(discfound,"replace")
+ if not replace then
+ local prev=getfield(base,"prev")
+ local copied=copy_node_list(comp)
+ setfield(discnext,"prev",nil)
+ setfield(discprev,"next",nil)
+ if pre then
+ setfield(discprev,"next",pre)
+ setfield(pre,"prev",discprev)
+ end
+ pre=comp
+ if post then
+ local tail=find_node_tail(post)
+ setfield(tail,"next",discnext)
+ setfield(discnext,"prev",tail)
+ setfield(post,"prev",nil)
+ else
+ post=discnext
+ end
+ setfield(prev,"next",discfound)
+ setfield(discfound,"prev",prev)
+ setfield(discfound,"next",next)
+ setfield(next,"prev",discfound)
+ setfield(base,"next",nil)
+ setfield(base,"prev",nil)
+ setfield(base,"components",copied)
+ setfield(discfound,"pre",pre)
+ setfield(discfound,"post",post)
+ setfield(discfound,"replace",base)
+ setfield(discfound,"subtype",discretionary_code)
+ base=prev
+ end
+ end
+ end
+ return head,base
+end
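+-- One-to-many substitution: start becomes the first glyph of multiple and copies are inserted for the remaining ones.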
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples=#multiple
+ if nofmultiples>0 then
+ resetinjection(start)
+ setfield(start,"char",multiple[1])
+ if nofmultiples>1 then
+ local sn=getnext(start)
+ for k=2,nofmultiples do
+ local n=copy_node(start)
+ resetinjection(n)
+ setfield(n,"char",multiple[k])
+ setfield(n,"prev",start)
+ setfield(n,"next",sn)
+ if sn then
+ setfield(sn,"prev",n)
+ end
+ setfield(start,"next",n)
+ start=n
+ end
+ end
+ return head,start,true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(getchar(start)))
+ end
+ return head,start,false
+ end
+end
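+-- Select an alternate glyph by value: "random", "first", "last" or a numeric index, with fallbacks for out-of-range values.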
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n=#alternatives
+ if value=="random" then
+ local r=random(1,n)
+ return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value=="first" then
+ return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value=="last" then
+ return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value=tonumber(value)
+ if type(value)~="number" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+    return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+   elseif defaultalt=="last" then
+    return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value==0 then
+ return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value<1 then
+ return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head,start,true
+end
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
+ end
+ end
+ return head,start,true
+end
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s,stop=getnext(start),nil
+ local startchar=getchar(start)
+ if marks[startchar] then
+ while s do
+ local id=getid(s)
+ if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then
+ local lg=ligature[getchar(s)]
+ if lg then
+ stop=s
+ ligature=lg
+ s=getnext(s)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=getchar(stop)
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head,start,true,false
+ else
+ end
+ end
+ else
+ local skipmark=sequence.flags[1]
+ local discfound=false
+ local lastdisc=nil
+ while s do
+ local id=getid(s)
+ if id==glyph_code and getsubtype(s)<256 then
+ if getfont(s)==currentfont then
+ local char=getchar(s)
+ if skipmark and marks[char] then
+ s=getnext(s)
+ else
+ local lg=ligature[char]
+ if lg then
+ if not discfound and lastdisc then
+ discfound=lastdisc
+ lastdisc=nil
+ end
+ stop=s
+ ligature=lg
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id==disc_code then
+ lastdisc=s
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ local lig=ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar=getchar(stop)
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ else
+ resetinjection(start)
+ setfield(start,"char",lig)
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ end
+ return head,start,true,discfound
+ else
+ end
+ end
+ return head,start,false,discfound
+end
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence,injection)
+ local startchar=getchar(start)
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head,start,false
+end
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
+ local snext=getnext(start)
+ if not snext then
+ return head,start,false
+ else
+ local prev=start
+ local done=false
+ local factor=tfmdata.parameters.factor
+ local lookuptype=lookuptypes[lookupname]
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=getnext(snext)
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,injection)
+ if trace_kerns then
+ local startchar=getchar(start)
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,injection)
+ if trace_kerns then
+ local startchar=getchar(start)
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn,injection)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+end
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=getligaindex(start)
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local base=getprev(start)
+ local slc=getligaindex(start)
+ if slc then
+ while base do
+ local blc=getligaindex(base)
+ if blc and blc~=slc then
+ base=getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local done=false
+ local startchar=getchar(start)
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
+ if marks[nextchar] then
+ nxt=getnext(nxt)
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head,start,false
+ end
+end
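+-- Handlers for the individual steps of contextual chain lookups; they parallel the direct gsub/gpos handlers above.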
+local chainprocs={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+local logwarning=report_subchain
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+local logwarning=report_chain
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char=getchar(start)
+ local replacement=replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head,start,true
+ else
+ return head,start,false
+ end
+end
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local current=start
+ local subtables=currentlookup.subtables
+ if #subtables>1 then
+ logwarning("todo: check if we need to loop over the replacements: % t",subtables)
+ end
+ while current do
+ if getid(current)==glyph_code then
+ local currentchar=getchar(current)
+ local lookupname=subtables[1]
+ local replacement=lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement=replacement[currentchar]
+ if not replacement or replacement=="" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ resetinjection(current)
+ setfield(current,"char",replacement)
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=getnext(current)
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local replacements=lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements=replacements[startchar]
+  if not replacements or replacements=="" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current=start
+ local subtables=currentlookup.subtables
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if getid(current)==glyph_code then
+ local currentchar=getchar(current)
+ local lookupname=subtables[1]
+ local alternatives=lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives=alternatives[currentchar]
+ if alternatives then
+ local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=getnext(current)
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local ligatures=lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures=ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s=getnext(start)
+ local discfound=false
+ local last=stop
+ local nofreplacements=1
+ local skipmark=currentlookup.flags[1]
+ while s do
+ local id=getid(s)
+ if id==disc_code then
+ if not discfound then
+ discfound=s
+ end
+ if s==stop then
+ break
+ else
+ s=getnext(s)
+ end
+ else
+ local schar=getchar(s)
+ if skipmark and marks[schar] then
+ s=getnext(s)
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures,last,nofreplacements=lg,s,nofreplacements+1
+ if s==stop then
+ break
+ else
+ s=getnext(s)
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2=ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
+ if start==stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
+ end
+ end
+ head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head,start,true,nofreplacements,discfound
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
+ end
+ end
+ end
+ end
+ return head,start,false,0,false
+end
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext=getnext(start)
+ if snext then
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local lookuptype=lookuptypes[lookupname]
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=getnext(snext)
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a)
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b)
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ if marks[basechar] then
+ while true do
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=getligaindex(start)
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=getchar(start)
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ local slc=getligaindex(start)
+ if slc then
+ while base do
+ local blc=getligaindex(base)
+ if blc and blc~=slc then
+ base=getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local startchar=getchar(start)
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local exitanchors=lookuphash[lookupname]
+ if exitanchors then
+ exitanchors=exitanchors[startchar]
+ end
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
+ if marks[nextchar] then
+ nxt=getnext(nxt)
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head,start,false
+ end
+ end
+ return head,start,false
+end
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
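+-- Apply a chain step whose matched context crosses discretionary nodes: the affected glyphs are moved into the disc fields and the handler runs on both the pre/post and replace branches.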
+local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,chainindex,sequence,chainproc)
+ if not start then
+ return head,start,false
+ end
+ local startishead=start==head
+ local seq=ck[3]
+ local f=ck[4]
+ local l=ck[5]
+ local s=#seq
+ local done=false
+ local sweepnode=sweepnode
+ local sweeptype=sweeptype
+ local sweepoverflow=false
+ local checkdisc=getprev(head)
+ local keepdisc=not sweepnode
+ local lookaheaddisc=nil
+ local backtrackdisc=nil
+ local current=start
+ local last=start
+ local prev=getprev(start)
+ local i=f
+ while i<=l do
+ local id=getid(current)
+ if id==glyph_code then
+ i=i+1
+ last=current
+ current=getnext(current)
+ elseif id==disc_code then
+ if keepdisc then
+ keepdisc=false
+ if notmatchpre[current]~=notmatchreplace[current] then
+ lookaheaddisc=current
+ end
+ local replace=getfield(current,"replace")
+ while replace and i<=l do
+ if getid(replace)==glyph_code then
+ i=i+1
+ end
+ replace=getnext(replace)
+ end
+ last=current
+    current=getnext(current)
+ else
+ head,current=flattendisk(head,current)
+ end
+ else
+ last=current
+ current=getnext(current)
+ end
+ if current then
+ elseif sweepoverflow then
+ break
+ elseif sweeptype=="post" or sweeptype=="replace" then
+ current=getnext(sweepnode)
+ if current then
+ sweeptype=nil
+ sweepoverflow=true
+ else
+ break
+ end
+ end
+ end
+ if sweepoverflow then
+ local prev=current and getprev(current)
+ if not current or prev~=sweepnode then
+ local head=getnext(sweepnode)
+ local tail=nil
+ if prev then
+ tail=prev
+ setfield(current,"prev",sweepnode)
+ else
+ tail=find_node_tail(head)
+ end
+ setfield(sweepnode,"next",current)
+ setfield(head,"prev",nil)
+ setfield(tail,"next",nil)
+ appenddisc(sweepnode,head)
+ end
+ end
+ if l<s then
+ local i=l
+ local t=sweeptype=="post" or sweeptype=="replace"
+ while current and i<s do
+ local id=getid(current)
+ if id==glyph_code then
+ i=i+1
+ current=getnext(current)
+ elseif id==disc_code then
+ if keepdisc then
+ keepdisc=false
+ if notmatchpre[current]~=notmatchreplace[current] then
+ lookaheaddisc=current
+ end
+     local replace=getfield(current,"replace")
+ while replace and i<s do
+ if getid(replace)==glyph_code then
+ i=i+1
+ end
+ replace=getnext(replace)
+ end
+ current=getnext(current)
+ elseif notmatchpre[current]~=notmatchreplace[current] then
+ head,current=flattendisk(head,current)
+ else
+ current=getnext(current)
+ end
+ else
+ current=getnext(current)
+ end
+ if not current and t then
+ current=getnext(sweepnode)
+ if current then
+ sweeptype=nil
+ end
+ end
+ end
+ end
+ if f>1 then
+ local current=prev
+ local i=f
+ local t=sweeptype=="pre" or sweeptype=="replace"
+ if not current and t and current==checkdisc then
+ current=getprev(sweepnode)
+ end
+ while current and i>1 do
+ local id=getid(current)
+ if id==glyph_code then
+ i=i-1
+ elseif id==disc_code then
+ if keepdisc then
+ keepdisc=false
+ if notmatchpost[current]~=notmatchreplace[current] then
+ backtrackdisc=current
+ end
+ local replace=getfield(current,"replace")
+ while replace and i>1 do
+ if getid(replace)==glyph_code then
+ i=i-1
+ end
+ replace=getnext(replace)
+ end
+ elseif notmatchpost[current]~=notmatchreplace[current] then
+ head,current=flattendisk(head,current)
+ end
+ end
+ current=getprev(current)
+ if t and current==checkdisc then
+ current=getprev(sweepnode)
+ end
+ end
+ end
+ local ok=false
+ if lookaheaddisc then
+ local cf=start
+ local cl=getprev(lookaheaddisc)
+ local cprev=getprev(start)
+ local insertedmarks=0
+ while cprev and getid(cf)==glyph_code and getfont(cf)==currentfont and getsubtype(cf)<256 and marks[getchar(cf)] do
+ insertedmarks=insertedmarks+1
+ cf=cprev
+ startishead=cf==head
+ cprev=getprev(cprev)
+ end
+ setfield(lookaheaddisc,"prev",cprev)
+ if cprev then
+ setfield(cprev,"next",lookaheaddisc)
+ end
+ setfield(cf,"prev",nil)
+ setfield(cl,"next",nil)
+ if startishead then
+ head=lookaheaddisc
+ end
+ local replace=getfield(lookaheaddisc,"replace")
+ local pre=getfield(lookaheaddisc,"pre")
+ local new=copy_node_list(cf)
+ local cnew=new
+ for i=1,insertedmarks do
+ cnew=getnext(cnew)
+ end
+ local clast=cnew
+ for i=f,l do
+ clast=getnext(clast)
+ end
+ if not notmatchpre[lookaheaddisc] then
+ cf,start,ok=chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if not notmatchreplace[lookaheaddisc] then
+ new,cnew,ok=chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if pre then
+ setfield(cl,"next",pre)
+ setfield(pre,"prev",cl)
+ end
+ if replace then
+ local tail=find_node_tail(new)
+ setfield(tail,"next",replace)
+ setfield(replace,"prev",tail)
+ end
+ setfield(lookaheaddisc,"pre",cf)
+ setfield(lookaheaddisc,"replace",new)
+ start=getprev(lookaheaddisc)
+ sweephead[cf]=getnext(clast)
+ sweephead[new]=getnext(last)
+ elseif backtrackdisc then
+ local cf=getnext(backtrackdisc)
+ local cl=start
+ local cnext=getnext(start)
+ local insertedmarks=0
+ while cnext and getid(cnext)==glyph_code and getfont(cnext)==currentfont and getsubtype(cnext)<256 and marks[getchar(cnext)] do
+ insertedmarks=insertedmarks+1
+ cl=cnext
+ cnext=getnext(cnext)
+ end
+ if cnext then
+ setfield(cnext,"prev",backtrackdisc)
+ end
+ setfield(backtrackdisc,"next",cnext)
+ setfield(cf,"prev",nil)
+ setfield(cl,"next",nil)
+ local replace=getfield(backtrackdisc,"replace")
+ local post=getfield(backtrackdisc,"post")
+ local new=copy_node_list(cf)
+ local cnew=find_node_tail(new)
+ for i=1,insertedmarks do
+ cnew=getprev(cnew)
+ end
+ local clast=cnew
+ for i=f,l do
+ clast=getnext(clast)
+ end
+ if not notmatchpost[backtrackdisc] then
+ cf,start,ok=chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if not notmatchreplace[backtrackdisc] then
+ new,cnew,ok=chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if post then
+ local tail=find_node_tail(post)
+ setfield(tail,"next",cf)
+ setfield(cf,"prev",tail)
+ else
+ post=cf
+ end
+ if replace then
+ local tail=find_node_tail(replace)
+ setfield(tail,"next",new)
+ setfield(new,"prev",tail)
+ else
+ replace=new
+ end
+ setfield(backtrackdisc,"post",post)
+ setfield(backtrackdisc,"replace",replace)
+ start=getprev(backtrackdisc)
+ sweephead[post]=getnext(clast)
+ sweephead[replace]=getnext(last)
+ else
+ head,start,ok=chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ return head,start,ok
+end
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ local sweepnode=sweepnode
+ local sweeptype=sweeptype
+ local diskseen=false
+ local checkdisc=getprev(head)
+ local flags=sequence.flags
+ local done=false
+ local skipmark=flags[1]
+ local skipligature=flags[2]
+ local skipbase=flags[3]
+ local markclass=sequence.markclass
+ local skipped=false
+ for k=1,#contexts do
+ local match=true
+ local current=start
+ local last=start
+ local ck=contexts[k]
+ local seq=ck[3]
+ local s=#seq
+ if s==1 then
+ match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ else
+ local f=ck[4]
+ local l=ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
+ else
+ local discfound=nil
+ local n=f+1
+ last=getnext(last)
+ while n<=l do
+ if not last and (sweeptype=="post" or sweeptype=="replace") then
+ last=getnext(sweepnode)
+ sweeptype=nil
+ end
+ if last then
+ local id=getid(last)
+ if id==glyph_code then
+ if getfont(last)==currentfont and getsubtype(last)<256 then
+ local char=getchar(last)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class or "base"
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=getnext(last)
+ elseif seq[n][char] then
+ if n<l then
+ last=getnext(last)
+ end
+ n=n+1
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ elseif id==disc_code then
+ diskseen=true
+ discfound=last
+ notmatchpre[last]=nil
+ notmatchpost[last]=true
+ notmatchreplace[last]=nil
+ local pre=getfield(last,"pre")
+ local replace=getfield(last,"replace")
+ if pre then
+ local n=n
+ while pre do
+ if seq[n][getchar(pre)] then
+ n=n+1
+ pre=getnext(pre)
+ if n>l then
+ break
+ end
+ else
+ notmatchpre[last]=true
+ break
+ end
+ end
+ if n<=l then
+ notmatchpre[last]=true
+ end
+ else
+ notmatchpre[last]=true
+ end
+ if replace then
+ while replace do
+ if seq[n][getchar(replace)] then
+ n=n+1
+ replace=getnext(replace)
+ if n>l then
+ break
+ end
+ else
+ notmatchreplace[last]=true
+ match=not notmatchpre[last]
+ break
+ end
+ end
+ match=not notmatchpre[last]
+ end
+ last=getnext(last)
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=getprev(start)
+ if prev then
+ if prev==checkdisc and (sweeptype=="pre" or sweeptype=="replace") then
+ prev=getprev(sweepnode)
+ end
+ if prev then
+ local discfound=nil
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=getid(prev)
+ if id==glyph_code then
+ if getfont(prev)==currentfont and getsubtype(prev)<256 then
+ local char=getchar(prev)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n-1
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpost[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpost[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpost[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ elseif id==disc_code then
+ diskseen=true
+ discfound=prev
+ notmatchpre[prev]=true
+ notmatchpost[prev]=nil
+ notmatchreplace[prev]=nil
+ local pre=getfield(prev,"pre")
+ local post=getfield(prev,"post")
+ local replace=getfield(prev,"replace")
+ if pre~=start and post~=start and replace~=start then
+ if post then
+ local n=n
+ local posttail=find_node_tail(post)
+ while posttail do
+ if seq[n][getchar(posttail)] then
+ n=n-1
+ if posttail==post then
+ break
+ else
+ posttail=getprev(posttail)
+ if n<1 then
+ break
+ end
+ end
+ else
+ notmatchpost[prev]=true
+ break
+ end
+ end
+ if n>=1 then
+ notmatchpost[prev]=true
+ end
+ else
+ notmatchpost[prev]=true
+ end
+ if replace then
+ local replacetail=find_node_tail(replace)
+ while replacetail do
+ if seq[n][getchar(replacetail)] then
+ n=n-1
+ if replacetail==replace then
+ break
+ else
+ replacetail=getprev(replacetail)
+ if n<1 then
+ break
+ end
+ end
+ else
+ notmatchreplace[prev]=true
+ match=not notmatchpost[prev]
+ break
+ end
+ end
+ if not match then
+ break
+ end
+ else
+ end
+ else
+ end
+ elseif seq[n][32] then
+ n=n-1
+ else
+ match=false
+ break
+ end
+ prev=getprev(prev)
+ elseif seq[n][32] then
+ n=n-1
+ else
+ match=false
+ break
+ end
+ end
+ else
+ match=false
+ end
+ else
+ match=false
+ end
+ end
+ if match and s>l then
+ local current=last and getnext(last)
+ if not current then
+ if sweeptype=="post" or sweeptype=="replace" then
+ current=getnext(sweepnode)
+ end
+ end
+ if current then
+ local discfound=nil
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=getid(current)
+ if id==glyph_code then
+ if getfont(current)==currentfont and getsubtype(current)<256 then
+ local char=getchar(current)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ else
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
+ end
+ elseif id==disc_code then
+ diskseen=true
+ discfound=current
+ notmatchpre[current]=nil
+ notmatchpost[current]=true
+ notmatchreplace[current]=nil
+ local pre=getfield(current,"pre")
+ local replace=getfield(current,"replace")
+ if pre then
+ local n=n
+ while pre do
+ if seq[n][getchar(pre)] then
+ n=n+1
+ pre=getnext(pre)
+ if n>s then
+ break
+ end
+ else
+ notmatchpre[current]=true
+ break
+ end
+ end
+ if n<=s then
+ notmatchpre[current]=true
+ end
+ else
+ notmatchpre[current]=true
+ end
+ if replace then
+ while replace do
+ if seq[n][getchar(replace)] then
+ n=n+1
+ replace=getnext(replace)
+ if n>s then
+ break
+ end
+ else
+ notmatchreplace[current]=true
+ match=not notmatchpre[current]
+ break
+ end
+ end
+ if not match then
+ break
+ end
+ else
+ end
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=getnext(current)
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ end
+ else
+ match=false
+ end
+ end
+ end
+ if match then
+ local diskchain=diskseen or sweepnode
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=getchar(start)
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local chainproc=chainprocs[chainlookup.type]
+ if chainproc then
+ local ok
+ if diskchain then
+ head,start,ok=chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
+ else
+ head,start,ok=chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ end
+ if ok then
+ done=true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i=1
+ while start and true do
+ if skipped then
+ while true do
+ local char=getchar(start)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class or "base"
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ start=getnext(start)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname=chainlookups[i]
+ local chainlookup=lookuptable[chainlookupname]
+ if not chainlookup then
+ i=i+1
+ else
+ local chainproc=chainprocs[chainlookup.type]
+ if not chainproc then
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i=i+1
+ else
+ local ok,n
+ if diskchain then
+ head,start,ok=chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
+ else
+ head,start,ok,n=chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ end
+ if ok then
+ done=true
+ if n and n>1 then
+ if i+n>nofchainlookups then
+ break
+ else
+ end
+ end
+ end
+ i=i+1
+ end
+ end
+ if i>nofchainlookups or not start then
+ break
+ elseif start then
+ start=getnext(start)
+ end
+ end
+ end
+ else
+ local replacements=ck[7]
+ if replacements then
+ head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ else
+ done=quit_on_no_replacement
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ if done then
+ break
+ end
+ end
+ end
+ if diskseen then
+ notmatchpre={}
+ notmatchpost={}
+ notmatchreplace={}
+ end
+ return head,start,done
+end
+local verbose_handle_contextchain=function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+otf.chainhandlers={
+ normal=normal_handle_contextchain,
+ verbose=verbose_handle_contextchain,
+}
+function otf.setcontextchain(method)
+ if not method or method=="normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain=normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler=otf.chainhandlers[method]
+ handlers.contextchain=function(...)
+ return handler(currentfont,...)
+ end
+ end
+ handlers.gsub_context=handlers.contextchain
+ handlers.gsub_contextchain=handlers.contextchain
+ handlers.gsub_reversecontextchain=handlers.contextchain
+ handlers.gpos_contextchain=handlers.contextchain
+ handlers.gpos_context=handlers.contextchain
+end
+otf.setcontextchain()
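+-- Minimal usage sketch: the call above installs the normal handler; a tracing
+-- session could temporarily switch to the verbose wrapper and back with
+--
+--   otf.setcontextchain("verbose")
+--   otf.setcontextchain()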
+local missing={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+local logwarning=report_process
+local function report_missing_cache(typ,lookup)
+ local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
+ local t=f[typ] if not t then t={} f[typ]=t end
+ if not t[lookup] then
+ t[lookup]=true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+local resolved={}
+local lookuphashes={}
+setmetatableindex(lookuphashes,function(t,font)
+ local lookuphash=fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash=false
+ end
+ t[font]=lookuphash
+ return lookuphash
+end)
+local autofeatures=fonts.analyzers.features
+local function initialize(sequence,script,language,enabled)
+ local features=sequence.features
+ if features then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local kind=order[i]
+ local valid=enabled[kind]
+ if valid then
+ local scripts=features[kind]
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence,kind }
+ end
+ end
+ end
+ else
+ end
+ end
+ return false
+end
+function otf.dataset(tfmdata,font)
+ local shared=tfmdata.shared
+ local properties=tfmdata.properties
+ local language=properties.language or "dflt"
+ local script=properties.script or "dflt"
+ local enabled=shared.features
+ local res=resolved[font]
+ if not res then
+ res={}
+ resolved[font]=res
+ end
+ local rs=res[script]
+ if not rs then
+ rs={}
+ res[script]=rs
+ end
+ local rl=rs[language]
+ if not rl then
+ rl={
+ }
+ rs[language]=rl
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+ end
+ end
+ return rl
+end
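+-- Each entry that otf.dataset collects is the quadruple built by initialize
+-- above: { feature value, analyzer attribute or false, sequence, feature kind };
+-- featuresprocessor below reads them back as dataset[1] .. dataset[4].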
+local function kernrun(disc,run)
+ if trace_kernruns then
+ report_run("kern")
+ end
+ local prev=getprev(disc)
+ local next=getnext(disc)
+ local pre=getfield(disc,"pre")
+ local post=getfield(disc,"post")
+ local replace=getfield(disc,"replace")
+ local prevmarks=prev
+ while prevmarks and getid(prevmarks)==glyph_code and marks[getchar(prevmarks)] and getfont(prevmarks)==currentfont and getsubtype(prevmarks)<256 do
+ prevmarks=getprev(prevmarks)
+ end
+ if prev and (pre or replace) and not (getid(prev)==glyph_code and getfont(prev)==currentfont and getsubtype(prev)<256) then
+ prev=false
+ end
+ if next and (post or replace) and not (getid(next)==glyph_code and getfont(next)==currentfont and getsubtype(next)<256) then
+ next=false
+ end
+ if not pre then
+ elseif prev then
+ local nest=getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ run(prevmarks,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ run(pre,"preinjections")
+ end
+ if not post then
+ elseif next then
+ local tail=find_node_tail(post)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(post,"postinjections",next)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(post,"postinjections")
+ end
+ if not replace and prev and next then
+ setfield(prev,"next",next)
+ setfield(next,"prev",prev)
+ run(prevmarks,"injections",next)
+ setfield(prev,"next",disc)
+ setfield(next,"prev",disc)
+ elseif prev and next then
+ local tail=find_node_tail(replace)
+ local nest=getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(prevmarks,"replaceinjections",next)
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ elseif prev then
+ local nest=getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ run(prevmarks,"replaceinjections")
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ elseif next then
+ local tail=find_node_tail(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(replace,"replaceinjections",next)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(replace,"replaceinjections")
+ end
+end
+local function comprun(disc,run)
+ if trace_compruns then
+ report_run("comp: %s",languages.serializediscretionary(disc))
+ end
+ local pre=getfield(disc,"pre")
+ if pre then
+ sweepnode=disc
+ sweeptype="pre"
+ local new,done=run(pre)
+ if done then
+ setfield(disc,"pre",new)
+ end
+ end
+ local post=getfield(disc,"post")
+ if post then
+ sweepnode=disc
+ sweeptype="post"
+ local new,done=run(post)
+ if done then
+ setfield(disc,"post",new)
+ end
+ end
+ local replace=getfield(disc,"replace")
+ if replace then
+ sweepnode=disc
+ sweeptype="replace"
+ local new,done=run(replace)
+ if done then
+ setfield(disc,"replace",new)
+ end
+ end
+ sweepnode=nil
+ sweeptype=nil
+end
+local function testrun(disc,trun,crun)
+ local next=getnext(disc)
+ if next then
+ local replace=getfield(disc,"replace")
+ if replace then
+ local prev=getprev(disc)
+ if prev then
+ local tail=find_node_tail(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ if trun(replace,next) then
+ setfield(disc,"replace",nil)
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ setfield(next,"prev",tail)
+ setfield(tail,"next",next)
+ setfield(disc,"prev",nil)
+ setfield(disc,"next",nil)
+ flush_node_list(disc)
+ return replace
+ else
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ end
+ else
+ end
+ else
+ end
+ else
+ end
+ comprun(disc,crun)
+ return next
+end
+local function discrun(disc,drun,krun)
+ local next=getnext(disc)
+ local prev=getprev(disc)
+ if trace_discruns then
+ report_run("disc")
+ end
+ if next and prev then
+ setfield(prev,"next",next)
+ drun(prev)
+ setfield(prev,"next",disc)
+ end
+ local pre=getfield(disc,"pre")
+ if not pre then
+ elseif prev then
+ local nest=getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ krun(prev,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ krun(pre,"preinjections")
+ end
+ return next
+end
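+-- The four helpers above deal with lookups around discretionary nodes: kernrun
+-- temporarily splices the disc's pre/post/replace fields into the main list so
+-- gpos injections can see both sides; comprun runs the given function over each
+-- field with sweepnode/sweeptype set, which is what chaindisk and the c_run
+-- closures below use to continue a match across the disc; testrun checks (via
+-- trun) whether a gsub_ligature match spans the replace text plus the following
+-- glyphs and, if so, removes the disc and keeps its replace text in the main
+-- list, otherwise it falls back to comprun; discrun briefly bridges prev/next
+-- around the disc for drun and runs krun on the pre field.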
+local function featuresprocessor(head,font,attr)
+ local lookuphash=lookuphashes[font]
+ if not lookuphash then
+ return head,false
+ end
+ head=tonut(head)
+ if trace_steps then
+ checkstep(head)
+ end
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ resources=tfmdata.resources
+ marks=resources.marks
+ anchorlookups=resources.lookup_to_anchor
+ lookuptable=resources.lookups
+ lookuptypes=resources.lookuptypes
+ lookuptags=resources.lookuptags
+ currentfont=font
+ rlmode=0
+ sweephead={}
+ local sequences=resources.sequences
+ local done=false
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local dirstack={}
+ for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local attribute=dataset[2]
+ local sequence=dataset[3]
+ local kind=dataset[4]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local typ=sequence.type
+ local gpossing=typ=="gpos_single" or typ=="gpos_pair"
+ local subtables=sequence.subtables
+ local handler=handlers[typ]
+ if typ=="gsub_reversecontextchain" then
+ local start=find_node_tail(head)
+ while start do
+ local id=getid(start)
+ if id==glyph_code then
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=a==attr
+ else
+ a=true
+ end
+ if a then
+ local char=getchar(start)
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ head,start,success=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=getprev(start) end
+ else
+ start=getprev(start)
+ end
+ else
+ start=getprev(start)
+ end
+ else
+ start=getprev(start)
+ end
+ end
+ else
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ local function c_run(head)
+ local done=false
+ local start=sweephead[head]
+ if start then
+ sweephead[head]=nil
+ else
+ start=head
+ end
+ while start do
+ local id=getid(start)
+ if id~=glyph_code then
+ start=getnext(start)
+ elseif getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[getchar(start)]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ end
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ return head,false
+ end
+ end
+ if done then
+ success=true
+ end
+ return head,done
+ end
+ local function t_run(start,stop)
+ while start~=stop do
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[getchar(start)]
+ if lookupmatch then
+ local s=getnext(start)
+ local l=nil
+ while s do
+ local lg=lookupmatch[getchar(s)]
+ if lg then
+ l=lg
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ end
+ start=getnext(start)
+ else
+ break
+ end
+ end
+ end
+ local function d_run(prev)
+ local a=getattr(prev,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
+ else
+ a=not attribute or getprop(prev,a_state)==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[getchar(prev)]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ success=true
+ end
+ end
+ end
+ end
+ local function k_run(sub,injection,last)
+ local a=getattr(sub,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(sub,a_state)==attribute)
+ else
+ a=not attribute or getprop(sub,a_state)==attribute
+ end
+ if a then
+ for n in traverse_nodes(sub) do
+ if n==last then
+ break
+ end
+ local id=getid(n)
+ if id==glyph_code then
+ local lookupmatch=lookupcache[getchar(n)]
+ if lookupmatch then
+ local h,d,ok=handler(sub,n,kind,lookupname,lookupmatch,sequence,lookuphash,1,injection)
+ if ok then
+ done=true
+ success=true
+ end
+ end
+ else
+ end
+ end
+ end
+ end
+ while start do
+ local id=getid(start)
+ if id==glyph_code then
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ local char=getchar(start)
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
+ elseif gpossing and zwnjruns and char==zwnj then
+ discrun(start,d_run)
+ end
+ elseif gpossing and zwnjruns and char==zwnj then
+ discrun(start,d_run)
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ start=getnext(start)
+ end
+ elseif id==disc_code then
+ if gpossing then
+ kernrun(start,k_run)
+ start=getnext(start)
+ elseif typ=="gsub_ligature" then
+ start=testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start=getnext(start)
+ end
+ elseif id==math_code then
+ start=getnext(end_of_math(start))
+ else
+ start=getnext(start)
+ end
+ end
+ end
+ else
+ local function c_run(head)
+ local done=false
+ local start=sweephead[head]
+ if start then
+ sweephead[head]=nil
+ else
+ start=head
+ end
+ while start do
+ local id=getid(start)
+ if id~=glyph_code then
+ start=getnext(start)
+ elseif getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ local char=getchar(start)
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ elseif not start then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ return head,false
+ end
+ end
+ if done then
+ success=true
+ end
+ return head,done
+ end
+ local function d_run(prev)
+ local a=getattr(prev,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
+ else
+ a=not attribute or getprop(prev,a_state)==attribute
+ end
+ if a then
+ local char=getchar(prev)
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ end
+ local function k_run(sub,injection,last)
+ local a=getattr(sub,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(sub,a_state)==attribute)
+ else
+ a=not attribute or getprop(sub,a_state)==attribute
+ end
+ if a then
+ for n in traverse_nodes(sub) do
+ if n==last then
+ break
+ end
+ local id=getid(n)
+ if id==glyph_code then
+ local char=getchar(n)
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local h,d,ok=handler(head,n,kind,lookupname,lookupmatch,sequence,lookuphash,i,injection)
+ if ok then
+ done=true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ else
+ end
+ end
+ end
+ end
+ local function t_run(start,stop)
+ while start~=stop do
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ local char=getchar(start)
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local s=getnext(start)
+ local l=nil
+ while s do
+ local lg=lookupmatch[getchar(s)]
+ if lg then
+ l=lg
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ start=getnext(start)
+ else
+ break
+ end
+ end
+ end
+ while start do
+ local id=getid(start)
+ if id==glyph_code then
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
+ else
+ a=not attribute or getprop(start,a_state)==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local char=getchar(start)
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ elseif not start then
+ break
+ elseif gpossing and zwnjruns and char==zwnj then
+ discrun(start,d_run)
+ end
+ elseif gpossing and zwnjruns and char==zwnj then
+ discrun(start,d_run)
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=getnext(start) end
+ else
+ start=getnext(start)
+ end
+ else
+ start=getnext(start)
+ end
+ elseif id==disc_code then
+ if gpossing then
+ kernrun(start,k_run)
+ start=getnext(start)
+ elseif typ=="gsub_ligature" then
+ start=testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start=getnext(start)
+ end
+ elseif id==math_code then
+ start=getnext(end_of_math(start))
+ else
+ start=getnext(start)
+ end
+ end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
+ end
+ head=tonode(head)
+ return head,done
+end
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if target then
+ target[unicode]=lookupdata
+ else
+ lookuphash[lookupname]={ [unicode]=lookupdata }
+ end
+end
+local function ligature(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ for i=1,#lookupdata do
+ local li=lookupdata[i]
+ local tu=target[li]
+ if not tu then
+ tu={}
+ target[li]=tu
+ end
+ target=tu
+ end
+ target.ligature=unicode
+end
+local function pair(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ local others=target[unicode]
+ local paired=lookupdata[1]
+ if others then
+ others[paired]=lookupdata
+ else
+ others={ [paired]=lookupdata }
+ target[unicode]=others
+ end
+end
+local action={
+ substitution=generic,
+ multiple=generic,
+ alternate=generic,
+ position=generic,
+ ligature=ligature,
+ pair=pair,
+ kern=pair,
+}
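+-- The ligature preparer above builds a trie keyed on the component glyphs, so a
+-- hypothetical lookup mapping the components f, f, i to a ligature U ends up as
+--
+--   lookuphash[lookupname][f][f][i].ligature = U
+--
+-- which is the shape the t_run closures above walk via lookupmatch[getchar(s)].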
+local function prepare_lookups(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local anchor_to_lookup=resources.anchor_to_lookup
+ local lookup_to_anchor=resources.lookup_to_anchor
+ local lookuptypes=resources.lookuptypes
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local duplicates=resources.duplicates
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ if description then
+ local lookups=description.slookups
+ if lookups then
+ for lookupname,lookupdata in next,lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash,duplicates)
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for lookupname,lookuplist in next,lookups do
+ local lookuptype=lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata=lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash,duplicates)
+ end
+ end
+ end
+ local list=description.kerns
+ if list then
+ for lookup,krn in next,list do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=krn
+ else
+ lookuphash[lookup]={ [unicode]=krn }
+ end
+ end
+ end
+ local list=description.anchors
+ if list then
+ for typ,anchors in next,list do
+ if typ=="mark" or typ=="cexit" then
+ for name,anchor in next,anchors do
+ local lookups=anchor_to_lookup[name]
+ if lookups then
+ for lookup in next,lookups do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=anchors
+ else
+ lookuphash[lookup]={ [unicode]=anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+local function split(replacement,original)
+ local result={}
+ for i=1,#replacement do
+ result[original[i]]=replacement[i]
+ end
+ return result
+end
+local valid={
+ coverage={ chainsub=true,chainpos=true,contextsub=true },
+ reversecoverage={ reversesub=true },
+ glyphs={ chainsub=true,chainpos=true },
+}
+local function prepare_contextchains(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
+ local lookups=rawdata.lookups
+ if lookups then
+ for lookupname,lookupdata in next,rawdata.lookups do
+ local lookuptype=lookupdata.type
+ if lookuptype then
+ local rules=lookupdata.rules
+ if rules then
+ local format=lookupdata.format
+ local validformat=valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
+ else
+ local contexts=lookuphash[lookupname]
+ if not contexts then
+ contexts={}
+ lookuphash[lookupname]=contexts
+ end
+ local t,nt={},0
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ if sequence[1] then
+ nt=nt+1
+ t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
+ for unic in next,sequence[start] do
+ local cu=contexts[unic]
+ if not cu then
+ contexts[unic]=t
+ end
+ end
+ end
+ end
+ end
+ else
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
+ end
+ end
+ end
+end
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local rawdata=tfmdata.shared.rawdata
+ local properties=rawdata.properties
+ if not properties.initialized then
+ local starttime=trace_preparing and os.clock()
+ local resources=rawdata.resources
+ resources.lookuphash=resources.lookuphash or {}
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized=true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
+}
+otf.handlers=handlers
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otp']={
+ version=1.001,
+ comment="companion to font-otf.lua (packing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type,tostring=next,type,tostring
+local sort,concat=table.sort,table.concat
+local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end)
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+fonts=fonts or {}
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local otf=handlers.otf or {}
+handlers.otf=otf
+local enhancers=otf.enhancers or {}
+otf.enhancers=enhancers
+local glists=otf.glists or { "gsub","gpos" }
+otf.glists=glists
+local criterium=1
+local threshold=0
+local function tabstr_normal(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if type(v)=="table" then
+ s[n]=k..">"..tabstr_normal(v)
+ elseif v==true then
+ s[n]=k.."+"
+ elseif v then
+ s[n]=k.."="..v
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_flat(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ s[n]=k.."="..v
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_mixed(t)
+ local s={}
+ local n=#t
+ if n==0 then
+ return ""
+ elseif n==1 then
+ local k=t[1]
+ if k==true then
+ return "++"
+ elseif k==false then
+ return "--"
+ else
+ return tostring(k)
+ end
+ else
+ for i=1,n do
+ local k=t[i]
+ if k==true then
+ s[i]="++"
+ elseif k==false then
+ s[i]="--"
+ else
+ s[i]=k
+ end
+ end
+ return concat(s,",")
+ end
+end
+local function tabstr_boolean(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if v then
+ s[n]=k.."+"
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function packdata(data)
+ if data then
+ local h,t,c={},{},{}
+ local hh,tt,cc={},{},{}
+ local nt,ntt=0,0
+ local function pack_normal(v)
+ local tag=tabstr_normal(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag=tabstr_flat(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag=tabstr_boolean(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag=concat(v," ")
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag=tabstr_mixed(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_final(v)
+ if c[v]<=criterium then
+ return t[v]
+ else
+ local hv=hh[v]
+ if hv then
+ return hv
+ else
+ ntt=ntt+1
+ tt[ntt]=t[v]
+ hh[v]=ntt
+ cc[ntt]=c[v]
+ return ntt
+ end
+ end
+ end
+ local function success(stage,pass)
+ if nt==0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt>=threshold then
+ local one,two,rest=0,0,0
+ if pass==1 then
+ for k,v in next,c do
+ if v==1 then
+ one=one+1
+ elseif v==2 then
+ two=two+1
+ else
+ rest=rest+1
+ end
+ end
+ else
+ for k,v in next,cc do
+ if v>20 then
+ rest=rest+1
+ elseif v>10 then
+ two=two+1
+ else
+ one=one+1
+ end
+ end
+ data.tables=tt
+ end
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium)
+ end
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold)
+ end
+ return false
+ end
+ end
+ local function packers(pass)
+ if pass==1 then
+ return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed
+ else
+ return pack_final,pack_final,pack_final,pack_final,pack_final
+ end
+ end
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local boundingbox=description.boundingbox
+ if boundingbox then
+ description.boundingbox=pack_indexed(boundingbox)
+ end
+ local slookups=description.slookups
+ if slookups then
+ for tag,slookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local t=slookup[2] if t then slookup[2]=pack_indexed(t) end
+ local t=slookup[3] if t then slookup[3]=pack_indexed(t) end
+ elseif what~="substitution" then
+ slookups[tag]=pack_indexed(slookup)
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#mlookup do
+ local lookup=mlookup[i]
+ local t=lookup[2] if t then lookup[2]=pack_indexed(t) end
+ local t=lookup[3] if t then lookup[3]=pack_indexed(t) end
+ end
+ elseif what~="substitution" then
+ for i=1,#mlookup do
+ mlookup[i]=pack_indexed(mlookup[i])
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_flat(kern)
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_normal(kern)
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ for what,anchor in next,anchors do
+ if what=="baselig" then
+ for _,a in next,anchor do
+ for k=1,#a do
+ a[k]=pack_indexed(a[k])
+ end
+ end
+ else
+ for k,v in next,anchor do
+ anchor[k]=pack_indexed(v)
+ end
+ end
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ for i=1,#altuni do
+ altuni[i]=pack_flat(altuni[i])
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.replacements if r then rule.replacements=pack_flat (r) end
+ local r=rule.lookups if r then rule.lookups=pack_indexed(r) end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ anchor_to_lookup[anchor]=pack_normal(lookup)
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ lookup_to_anchor[lookup]=pack_normal(anchor)
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ local flags=sequence.flags
+ if flags then
+ sequence.flags=pack_normal(flags)
+ end
+ local subtables=sequence.subtables
+ if subtables then
+ sequence.subtables=pack_normal(subtables)
+ end
+ local features=sequence.features
+ if features then
+ for script,feature in next,features do
+ features[script]=pack_normal(feature)
+ end
+ end
+ local order=sequence.order
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ local markclass=sequence.markclass
+ if markclass then
+ sequence.markclass=pack_boolean(markclass)
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for name,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ lookup.flags=pack_normal(flags)
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ lookup.subtables=pack_normal(subtables)
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local list=features[what]
+ if list then
+ for feature,spec in next,list do
+ list[feature]=pack_normal(spec)
+ end
+ end
+ end
+ end
+ if not success(1,pass) then
+ return
+ end
+ end
+ if nt>0 then
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local kerns=description.kerns
+ if kerns then
+ description.kerns=pack_normal(kerns)
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ math.kerns=pack_normal(kerns)
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ description.anchors=pack_normal(anchors)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ mlookups[tag]=pack_normal(mlookup)
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ description.altuni=pack_normal(altuni)
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then rule.before=pack_normal(r) end
+ local r=rule.after if r then rule.after=pack_normal(r) end
+ local r=rule.current if r then rule.current=pack_normal(r) end
+ end
+ end
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ sequence.features=pack_normal(sequence.features)
+ end
+ end
+ if not success(2,pass) then
+ end
+ end
+ for pass=1,2 do
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local slookups=description.slookups
+ if slookups then
+ description.slookups=pack_normal(slookups)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ description.mlookups=pack_normal(mlookups)
+ end
+ end
+ end
+ end
+ end
+end
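+-- packdata works per stage in two passes: pass 1 replaces every candidate
+-- subtable by an index into a scratch list (hashing it with a tabstr_* tag and
+-- counting duplicates), pass 2 (pack_final) keeps an index only for tables seen
+-- more often than `criterium` and moves those into data.tables, restoring the
+-- rest in place; unpackdata below expands the remaining indices by looking them
+-- up in data.tables again.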
+local unpacked_mt={
+ __index=function(t,k)
+ t[k]=false
+ return k
+ end
+}
+local function unpackdata(data)
+ if data then
+ local tables=data.tables
+ if tables then
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ local unpacked={}
+ setmetatable(unpacked,unpacked_mt)
+ for unicode,description in next,data.descriptions do
+ local tv=tables[description.boundingbox]
+ if tv then
+ description.boundingbox=tv
+ end
+ local slookups=description.slookups
+ if slookups then
+ local tv=tables[slookups]
+ if tv then
+ description.slookups=tv
+ slookups=unpacked[tv]
+ end
+ if slookups then
+ for tag,lookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ elseif what~="substitution" then
+ local tv=tables[lookup]
+ if tv then
+ slookups[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ local tv=tables[mlookups]
+ if tv then
+ description.mlookups=tv
+ mlookups=unpacked[tv]
+ end
+ if mlookups then
+ for tag,list in next,mlookups do
+ local tv=tables[list]
+ if tv then
+ mlookups[tag]=tv
+ list=unpacked[tv]
+ end
+ if list then
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#list do
+ local lookup=list[i]
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ end
+ elseif what~="substitution" then
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ description.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ math.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ local ta=tables[anchors]
+ if ta then
+ description.anchors=ta
+ anchors=unpacked[ta]
+ end
+ if anchors then
+ for tag,anchor in next,anchors do
+ if tag=="baselig" then
+ for _,list in next,anchor do
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ else
+ for a,data in next,anchor do
+ local tv=tables[data]
+ if tv then
+ anchor[a]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ local altuni=tables[altuni]
+ if altuni then
+ description.altuni=altuni
+ for i=1,#altuni do
+ local tv=tables[altuni[i]]
+ if tv then
+ altuni[i]=tv
+ end
+ end
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local before=rule.before
+ if before then
+ local tv=tables[before]
+ if tv then
+ rule.before=tv
+ before=unpacked[tv]
+ end
+ if before then
+ for i=1,#before do
+ local tv=tables[before[i]]
+ if tv then
+ before[i]=tv
+ end
+ end
+ end
+ end
+ local after=rule.after
+ if after then
+ local tv=tables[after]
+ if tv then
+ rule.after=tv
+ after=unpacked[tv]
+ end
+ if after then
+ for i=1,#after do
+ local tv=tables[after[i]]
+ if tv then
+ after[i]=tv
+ end
+ end
+ end
+ end
+ local current=rule.current
+ if current then
+ local tv=tables[current]
+ if tv then
+ rule.current=tv
+ current=unpacked[tv]
+ end
+ if current then
+ for i=1,#current do
+ local tv=tables[current[i]]
+ if tv then
+ current[i]=tv
+ end
+ end
+ end
+ end
+ local replacements=rule.replacements
+ if replacements then
+ local tv=tables[replacements]
+ if tv then
+ rule.replacements=tv
+ end
+ end
+ local lookups=rule.lookups
+ if lookups then
+ local tv=tables[lookups]
+ if tv then
+ rule.lookups=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ local tv=tables[lookup]
+ if tv then
+ anchor_to_lookup[anchor]=tv
+ end
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ local tv=tables[anchor]
+ if tv then
+ lookup_to_anchor[lookup]=tv
+ end
+ end
+ end
+ local ls=resources.sequences
+ if ls then
+ for _,feature in next,ls do
+ local flags=feature.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ feature.flags=tv
+ end
+ end
+ local subtables=feature.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ feature.subtables=tv
+ end
+ end
+ local features=feature.features
+ if features then
+ local tv=tables[features]
+ if tv then
+ feature.features=tv
+ features=unpacked[tv]
+ end
+ if features then
+ for script,data in next,features do
+ local tv=tables[data]
+ if tv then
+ features[script]=tv
+ end
+ end
+ end
+ end
+ local order=feature.order
+ if order then
+ local tv=tables[order]
+ if tv then
+ feature.order=tv
+ end
+ end
+ local markclass=feature.markclass
+ if markclass then
+ local tv=tables[markclass]
+ if tv then
+ feature.markclass=tv
+ end
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ lookup.flags=tv
+ end
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ lookup.subtables=tv
+ end
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local feature=features[what]
+ if feature then
+ for tag,spec in next,feature do
+ local tv=tables[spec]
+ if tv then
+ feature[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ data.tables=nil
+ end
+ end
+end
+if otf.enhancers.register then
+ otf.enhancers.register("pack",packdata)
+ otf.enhancers.register("unpack",unpackdata)
+end
+otf.enhancers.unpack=unpackdata
+otf.enhancers.pack=packdata
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-lua']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.formats.lua="lua"
+function fonts.readers.lua(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
+end
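+-- The reader above resolves the file, runs the chunk and calls whatever the
+-- chunk returns with the specification. A minimal sketch of such a lua "font"
+-- file (purely illustrative; the fields the returned table must carry are
+-- defined by the constructors elsewhere):
+--
+--   -- somefont.lua
+--   return function(specification)
+--     return {
+--       name       = specification.name,
+--       size       = specification.size,
+--       characters = { },
+--     }
+--   end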
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-def']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
+local tostring,next=tostring,next
+local lpegmatch=lpeg.match
+local suffixonly,removesuffix=file.suffix,file.removesuffix
+local allocate=utilities.storage.allocate
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
+trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading")
+trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*")
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local fontdata=fonts.hashes.identifiers
+local readers=fonts.readers
+local definers=fonts.definers
+local specifiers=fonts.specifiers
+local constructors=fonts.constructors
+local fontgoodies=fonts.goodies
+readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' }
+local variants=allocate()
+specifiers.variants=variants
+definers.methods=definers.methods or {}
+local internalized=allocate()
+local lastdefined=nil
+local loadedfonts=constructors.loadedfonts
+local designsizes=constructors.designsizes
+local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+local splitter,splitspecifiers=nil,""
+local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc
+local left=P("(")
+local right=P(")")
+local colon=P(":")
+local space=P(" ")
+definers.defaultlookup="file"
+local prefixpattern=P(false)
+local function addspecifier(symbol)
+ splitspecifiers=splitspecifiers..symbol
+ local method=S(splitspecifiers)
+ local lookup=C(prefixpattern)*colon
+ local sub=left*C(P(1-left-right-method)^1)*right
+ local specification=C(method)*C(P(1)^1)
+ local name=C((1-sub-specification)^1)
+ splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc("")))
+end
+local function addlookup(str,default)
+ prefixpattern=prefixpattern+P(str)
+end
+definers.addlookup=addlookup
+addlookup("file")
+addlookup("name")
+addlookup("spec")
+local function getspecification(str)
+ return lpegmatch(splitter,str or "")
+end
+definers.getspecification=getspecification
+function definers.registersplit(symbol,action,verbosename)
+ addspecifier(symbol)
+ variants[symbol]=action
+ if verbosename then
+ variants[verbosename]=action
+ end
+end
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size=size or 655360
+ if not lookup or lookup=="" then
+ lookup=definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification,lookup,name,sub,method,detail)
+ end
+ local t={
+ lookup=lookup,
+ specification=specification,
+ size=size,
+ name=name,
+ sub=sub,
+ method=method,
+ detail=detail,
+ resolved="",
+ forced="",
+ features={},
+ }
+ return t
+end
+definers.makespecification=makespecification
+function definers.analyze(specification,size)
+ local lookup,name,sub,method,detail=getspecification(specification or "")
+ return makespecification(specification,lookup,name,sub,method,detail,size)
+end
+definers.resolvers=definers.resolvers or {}
+local resolvers=definers.resolvers
+function resolvers.file(specification)
+ local name=resolvefile(specification.name)
+ local suffix=lower(suffixonly(name))
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.forcedname=name
+ specification.name=removesuffix(name)
+ else
+ specification.name=name
+ end
+end
+function resolvers.name(specification)
+ local resolve=fonts.names.resolve
+ if resolve then
+ local resolved,sub,subindex=resolve(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.subindex=subindex
+ local suffix=lower(suffixonly(resolved))
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
+ else
+ specification.name=resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
+end
+function resolvers.spec(specification)
+ local resolvespec=fonts.names.resolvespec
+ if resolvespec then
+ local resolved,sub,subindex=resolvespec(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.subindex=subindex
+ specification.forced=lower(suffixonly(resolved))
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
+end
+function definers.resolve(specification)
+ if not specification.resolved or specification.resolved=="" then
+ local r=resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced=="" then
+ specification.forced=nil
+ specification.forcedname=nil
+ end
+ specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
+ if specification.sub and specification.sub~="" then
+ specification.hash=specification.sub..' @ '..specification.hash
+ end
+ return specification
+end
+function definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ local properties=tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname=format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+local function checkembedding(tfmdata)
+ local properties=tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding="full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding="no"
+ else
+ embedding="subset"
+ end
+ if properties then
+ properties.embedding=embedding
+ else
+ tfmdata.properties={ embedding=embedding }
+ end
+ tfmdata.embedding=embedding
+end
+function definers.loadfont(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=loadedfonts[hash]
+ if not tfmdata then
+ local forced=specification.forced or ""
+ if forced~="" then
+ local reader=readers[lower(forced)]
+ tfmdata=reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence=readers.sequence
+ for s=1,#sequence do
+ local reader=sequence[s]
+ if readers[reader] then
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata=readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename=nil
+ end
+ end
+ end
+ end
+ if tfmdata then
+ tfmdata=definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata)
+ loadedfonts[hash]=tfmdata
+ designsizes[specification.hash]=tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
+function constructors.checkvirtualids()
+end
+function constructors.readanddefine(name,size)
+ local specification=definers.analyze(name,size)
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local id=definers.registered(hash)
+ if not id then
+ local tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash=hash
+ constructors.checkvirtualids(tfmdata)
+ id=font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id=0
+ end
+ end
+ return fontdata[id],id
+end
+function definers.current()
+ return lastdefined
+end
+function definers.registered(hash)
+ local id=internalized[hash]
+ return id,id and fontdata[id]
+end
+function definers.register(tfmdata,id)
+ if tfmdata and id then
+ local hash=tfmdata.properties.hash
+ if not hash then
+ report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
+ elseif not internalized[hash] then
+ internalized[hash]=id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id]=tfmdata
+ end
+ end
+end
+function definers.read(specification,size,id)
+ statistics.starttiming(fonts)
+ if type(specification)=="string" then
+ specification=definers.analyze(specification,size)
+ end
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=definers.registered(hash)
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
+ end
+ else
+ tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash=hash
+ if id then
+ definers.register(tfmdata,id)
+ end
+ else
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined=tfmdata or id
+ if not tfmdata then
+ report_defining("unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata)=="table" then
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes,
+ properties.encodingname,properties.fullname,file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+function font.getfont(id)
+ return fontdata[id]
+end
+callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)")
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-def']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.constructors.namemode="specification"
+function fonts.definers.getspecification(str)
+ return "",str,"",":",str
+end
+local list={}
+local function issome () list.lookup='name' end
+local function isfile () list.lookup='file' end
+local function isname () list.lookup='name' end
+local function thename(s) list.name=s end
+local function issub (v) list.sub=v end
+local function iscrap (s) list.crap=string.lower(s) end
+local function iskey (k,v) list[k]=v end
+local function istrue (s) list[s]=true end
+local function isfalse(s) list[s]=false end
+local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C
+local spaces=P(" ")^0
+local namespec=(1-S("/:("))^0
+local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces
+local filename_1=P("file:")/isfile*(namespec/thename)
+local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]")
+local fontname_1=P("name:")/isname*(namespec/thename)
+local fontname_2=P(true)/issome*(namespec/thename)
+local sometext=(R("az","AZ","09")+S("+-."))^1
+local truevalue=P("+")*spaces*(sometext/istrue)
+local falsevalue=P("-")*spaces*(sometext/isfalse)
+local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey
+local somevalue=sometext/istrue
+local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")")
+local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces
+local options=P(":")*spaces*(P(";")^0*option)^0
+local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0
+local function colonized(specification)
+ list={}
+ lpeg.match(pattern,specification.specification)
+ list.crap=nil
+ if list.name then
+ specification.name=list.name
+ list.name=nil
+ end
+ if list.lookup then
+ specification.lookup=list.lookup
+ list.lookup=nil
+ end
+ if list.sub then
+ specification.sub=list.sub
+ list.sub=nil
+ end
+ specification.features.normal=fonts.handlers.otf.features.normalize(list)
+ return specification
+end
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("",colonized,"more cryptic")
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=string.gsub(string.lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname=string.format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-ext']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local otffeatures=fonts.constructors.newfeatures("otf")
+local function initializeitlc(tfmdata,value)
+ if value then
+ local parameters=tfmdata.parameters
+ local italicangle=parameters.italicangle
+ if italicangle and italicangle~=0 then
+ local properties=tfmdata.properties
+ local factor=tonumber(value) or 1
+ properties.hasitalics=true
+ properties.autoitalicamount=factor*(parameters.uwidth or 40)/2
+ end
+ end
+end
+otffeatures.register {
+ name="itlc",
+ description="italic correction",
+ initializers={
+ base=initializeitlc,
+ node=initializeitlc,
+ }
+}
+local function initializeslant(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>1 then
+ value=1
+ elseif value<-1 then
+ value=-1
+ end
+ tfmdata.parameters.slantfactor=value
+end
+otffeatures.register {
+ name="slant",
+ description="slant glyphs",
+ initializers={
+ base=initializeslant,
+ node=initializeslant,
+ }
+}
+local function initializeextend(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>10 then
+ value=10
+ elseif value<-10 then
+ value=-10
+ end
+ tfmdata.parameters.extendfactor=value
+end
+otffeatures.register {
+ name="extend",
+ description="scale glyphs horizontally",
+ initializers={
+ base=initializeextend,
+ node=initializeextend,
+ }
+}
+fonts.protrusions=fonts.protrusions or {}
+fonts.protrusions.setups=fonts.protrusions.setups or {}
+local setups=fonts.protrusions.setups
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1
+ local emwidth=tfmdata.parameters.quad
+ tfmdata.parameters.protrusion={
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v,pl,pr=setup[i],nil,nil
+ if v then
+ pl,pr=v[1],v[2]
+ end
+ if pl and pl~=0 then chr.left_protruding=left*pl*factor end
+ if pr and pr~=0 then chr.right_protruding=right*pr*factor end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="protrusion",
+ description="shift characters into the left and or right margin",
+ initializers={
+ base=initializeprotrusion,
+ node=initializeprotrusion,
+ }
+}
+fonts.expansions=fonts.expansions or {}
+fonts.expansions.setups=fonts.expansions.setups or {}
+local setups=fonts.expansions.setups
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor=setup.factor or 1
+ tfmdata.parameters.expansion={
+ stretch=10*(setup.stretch or 0),
+ shrink=10*(setup.shrink or 0),
+ step=10*(setup.step or 0),
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v=setup[i]
+ if v and v~=0 then
+ chr.expansion_factor=v*factor
+ else
+ chr.expansion_factor=factor
+ end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="expansion",
+ description="apply hz optimization",
+ initializers={
+ base=initializeexpansion,
+ node=initializeexpansion,
+ }
+}
+function fonts.loggers.onetimemessage() end
+local byte=string.byte
+fonts.expansions.setups['default']={
+ stretch=2,shrink=2,step=.5,factor=1,
+ [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7,
+ [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7,
+ [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7,
+ [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7,
+ [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7,
+ [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7,
+ [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7,
+ [byte('w')]=0.7,[byte('z')]=0.7,
+ [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7,
+}
+fonts.protrusions.setups['default']={
+ factor=1,left=1,right=1,
+ [0x002C]={ 0,1 },
+ [0x002E]={ 0,1 },
+ [0x003A]={ 0,1 },
+ [0x003B]={ 0,1 },
+ [0x002D]={ 0,1 },
+ [0x2013]={ 0,0.50 },
+ [0x2014]={ 0,0.33 },
+ [0x3001]={ 0,1 },
+ [0x3002]={ 0,1 },
+ [0x060C]={ 0,1 },
+ [0x061B]={ 0,1 },
+ [0x06D4]={ 0,1 },
+}
+fonts.handlers.otf.features.normalize=function(t)
+ if t.rand then
+ t.rand="random"
+ end
+ return t
+end
+function fonts.helpers.nametoslot(name)
+ local t=type(name)
+ if t=="string" then
+ local tfmdata=fonts.hashes.identifiers[font.current()]
+ local shared=tfmdata and tfmdata.shared
+ local fntdata=shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t=="number" then
+ return name
+ end
+end
+fonts.encodings=fonts.encodings or {}
+local reencodings={}
+fonts.encodings.reencodings=reencodings
+local function specialreencode(tfmdata,value)
+ local encoding=value and reencodings[value]
+ if encoding then
+ local temp={}
+ local char=tfmdata.characters
+ for k,v in next,encoding do
+ temp[k]=char[v]
+ end
+ for k,v in next,temp do
+ char[k]=temp[k]
+ end
+ return string.format("reencoded:%s",value)
+ end
+end
+local function reencode(tfmdata,value)
+ tfmdata.postprocessors=tfmdata.postprocessors or {}
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
+end
+otffeatures.register {
+ name="reencode",
+ description="reencode characters",
+ manipulators={
+ base=reencode,
+ node=reencode,
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-cbk']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local nodes=nodes
+local traverse_id=node.traverse_id
+local free_node=node.free
+local remove_node=node.remove
+local glyph_code=nodes.nodecodes.glyph
+local disc_code=nodes.nodecodes.disc
+local ligaturing=node.ligaturing
+local kerning=node.kerning
+local basepass=true
+local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning=nil end
+local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning=nil end
+function node.ligaturing(...)
+ if basepass and l_warning then
+ l_warning()
+ end
+ return ligaturing(...)
+end
+function node.kerning(...)
+ if basepass and k_warning then
+ k_warning()
+ end
+ return kerning(...)
+end
+function nodes.handlers.setbasepass(v)
+ basepass=v
+end
+function nodes.handlers.nodepass(head)
+ local fontdata=fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts={}
+ local basefonts={}
+ local prevfont=nil
+ local basefont=nil
+ local variants=nil
+ local redundant=nil
+ for n in traverse_id(glyph_code,head) do
+ local font=n.font
+ if font~=prevfont then
+ if basefont then
+ basefont[2]=n.prev
+ end
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ elseif basepass then
+ basefont={ n,nil }
+ basefonts[#basefonts+1]=basefont
+ end
+ end
+ local resources=tfmdata.resources
+ variants=resources and resources.variants
+ variants=variants and next(variants) and variants or false
+ end
+ else
+ local tfmdata=fontdata[prevfont]
+ if tfmdata then
+ local resources=tfmdata.resources
+ variants=resources and resources.variants
+ variants=variants and next(variants) and variants or false
+ end
+ end
+ end
+ if variants then
+ local char=n.char
+ if char>=0xFE00 and (char<=0xFE0F or (char>=0xE0100 and char<=0xE01EF)) then
+ local hash=variants[char]
+ if hash then
+ local p=n.prev
+ if p and p.id==glyph_code then
+ local variant=hash[p.char]
+ if variant then
+ p.char=variant
+ if not redundant then
+ redundant={ n }
+ else
+ redundant[#redundant+1]=n
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if redundant then
+ for i=1,#redundant do
+ local n=redundant[i]
+ remove_node(head,n)
+ free_node(n)
+ end
+ end
+ for d in traverse_id(disc_code,head) do
+ local r=d.replace
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if next(usedfonts) then
+ for font,processors in next,usedfonts do
+ for i=1,#processors do
+ head=processors[i](head,font,0) or head
+ end
+ end
+ end
+ if basepass and #basefonts>0 then
+ for i=1,#basefonts do
+ local range=basefonts[i]
+ local start=range[1]
+ local stop=range[2]
+ if start or stop then
+ local prev=nil
+ local next=nil
+ local front=start==head
+ if stop then
+ next=stop.next
+ start,stop=ligaturing(start,stop)
+ start,stop=kerning(start,stop)
+ elseif start then
+ prev=start.prev
+ start=ligaturing(start)
+ start=kerning(start)
+ end
+ if prev then
+ start.prev=prev
+ prev.next=start
+ end
+ if next then
+ stop.next=next
+ next.prev=stop
+ end
+ if front then
+ head=start
+ end
+ end
+ end
+ end
+ return head,true
+ else
+ return head,false
+ end
+end
+function nodes.handlers.basepass(head)
+ if basepass then
+ head=ligaturing(head)
+ head=kerning(head)
+ end
+ return head,true
+end
+local nodepass=nodes.handlers.nodepass
+local basepass=nodes.handlers.basepass
+local injectpass=nodes.injections.handler
+local protectpass=nodes.handlers.protectglyphs
+function nodes.simple_font_handler(head)
+ if head then
+ head=nodepass(head)
+ head=injectpass(head)
+ head=basepass(head)
+ protectpass(head)
+ return head,true
+ else
+ return head,false
+ end
+end
+
+end -- closure
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.lua
new file mode 100644
index 00000000000..db6a729098d
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.lua
@@ -0,0 +1,25 @@
+local gm = swiglib("gmwand.core")
+
+gm.InitializeMagick(".")
+
+local magick_wand = gm.NewMagickWand()
+local drawing_wand = gm.NewDrawingWand()
+
+gm.MagickSetSize(magick_wand,800,600)
+gm.MagickReadImage(magick_wand,"xc:red")
+
+gm.DrawPushGraphicContext(drawing_wand)
+
+gm.DrawSetFillColor(drawing_wand,gm.NewPixelWand())
+
+-- gm.DrawSetFont(drawing_wand, kpse.findfile("DejaVuSerifBold.ttf"))
+-- gm.DrawSetFontSize(drawing_wand, 96)
+-- gm.DrawAnnotation(drawing_wand,300,200, "LuaTeX")
+
+gm.DrawPopGraphicContext(drawing_wand)
+gm.MagickDrawImage(magick_wand,drawing_wand)
+
+gm.MagickWriteImages(magick_wand,"./luatex-swiglib-test.jpg",1)
+
+gm.DestroyDrawingWand(drawing_wand)
+gm.DestroyMagickWand(magick_wand)
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.tex
new file mode 100644
index 00000000000..d26bb6ffbb3
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.tex
@@ -0,0 +1,11 @@
+% luatex --fmt=luatex-plain luatex-swiglib-test.tex
+
+\input luatex-swiglib.tex
+
+\directlua {
+ dofile("luatex-swiglib-test.lua")
+}
+
+\pdfximage {luatex-swiglib-test.jpg} \pdfrefximage\pdflastximage
+
+\end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.lua
new file mode 100644
index 00000000000..7ffcdc37562
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.lua
@@ -0,0 +1,62 @@
+if not modules then modules = { } end modules ['luatex-swiglib'] = {
+ version = 1.001,
+ comment = "companion to luatex-swiglib.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local savedrequire = require
+
+local libsuffix = os.type == "windows" and ".dll" or ".so"
+
+function requireswiglib(required,version)
+ local library = package.loaded[required]
+ if library then
+ return library
+ else
+ local name = string.gsub(required,"%.","/") .. libsuffix
+ local list = kpse.show_path("clua")
+ for root in string.gmatch(list,"([^;]+)") do
+ local full = false
+ if type(version) == "string" and version ~= "" then
+ full = root .. "/" .. version .. "/" .. name
+ full = lfs.isfile(full) and full
+ end
+ if not full then
+ full = root .. "/" .. name
+ full = lfs.isfile(full) and full
+ end
+ if full then
+ local path, base = string.match(full,"^(.-)([^\\/]+)" .. libsuffix .."$")
+ local savedlibrary = package.loaded[base]
+ package.loaded[base] = nil
+ local savedpath = lfs.currentdir()
+ lfs.chdir(path)
+ library = package.loadlib(full,"luaopen_" .. base)
+ if type(library) == "function" then
+ library = library()
+ texio.write("<swiglib: '",required,"' is loaded>")
+ end
+ lfs.chdir(savedpath)
+ package.loaded[base] = savedlibrary
+ package.loaded[required] = library
+ return library
+ end
+ end
+ texio.write("<swiglib: '",name,"'is not found on '",list,"'")
+ end
+ texio.write("<swiglib: '",required,"' is not found>")
+end
+
+function require(name)
+ if string.find(name,"^swiglib%.") then
+ return requireswiglib(name)
+ else
+ return savedrequire(name)
+ end
+end
+
+function swiglib(name,version)
+ return requireswiglib("swiglib." .. name,version)
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.tex
new file mode 100644
index 00000000000..7c437751587
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.tex
@@ -0,0 +1,20 @@
+%D \module
+%D [ file=luatex-swiglib,
+%D version=2013.03.30,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Generic \SWIGLIB\ Font Handler,
+%D author=Hans Hagen,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D This is an experimental setup. Usage:
+%D
+%D \starttyping
+%D local gm = swiglib("gmwand.core")
+%D local gm = require("swiglib.gmwand.core")
+%D local sq = swiglib("mysql.core")
+%D local sq = swiglib("mysql.core","5.6")
+%D \stoptyping
+
+\directlua {
+ dofile(kpse.find_file("luatex-swiglib.lua","tex"))
+}
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-test.tex b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-test.tex
new file mode 100644
index 00000000000..f851aab6f38
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-test.tex
@@ -0,0 +1,134 @@
+% texformat=luatex-plain
+
+%D \module
+%D [ file=luatex-test,
+%D version=2009.12.01,
+%D title=\LUATEX\ Support Macros,
+%D subtitle=Simple Test File,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%D See \type {luatex-plain.tex} (or on my machine \type {luatex.tex})
+%D for how to make a format.
+
+% You can generate a font database with:
+%
+% mtxrun --script fonts --reload --save
+%
+% The file luatex-fonts-names.lua has to be moved to a place
+% where kpse can find it.
+
+\pdfoutput=1
+
+\font\testa=file:lmroman10-regular at 12pt \testa \input tufte \par
+\font\testb=file:lmroman12-regular:+liga; at 24pt \testb effe flink fietsen \par
+\font\testc=file:lmroman12-regular:mode=node;+liga; at 24pt \testc effe flink fietsen \par
+\font\testd=name:lmroman10bold at 12pt \testd a bit bold \par
+
+\font\oeps=cmr10
+
+\font\oeps=[lmroman12-regular]:+liga at 30pt \oeps crap
+\font\oeps=[lmroman12-regular] at 40pt \oeps more crap
+
+\font\cidtest=adobesongstd-light
+
+\font\mathtest=cambria(math) {\mathtest 123}
+
+% \font\gothic=msgothic(ms-gothic) {\gothic whatever} % no longer in windows 10
+
+\bgroup
+
+ \ifdefined\pdfprotrudechars \pdfprotrudechars \else \protrudechars \fi 2 \relax
+ \ifdefined\pdfadjustspacing \pdfadjustspacing \else \adjustspacing \fi 2 \relax
+
+ \font\testb=file:lmroman12-regular:+liga;extend=1.5 at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;slant=0.8 at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;protrusion=default at 12pt \testb \input tufte \par
+
+\egroup
+
+\setmplibformat{plain}
+
+\directlua {
+ function MpTest()
+ metapost.print("fullcircle scaled 3cm")
+ end
+}
+
+\mplibcode
+ beginfig(1) ;
+ draw fullcircle
+ scaled 10cm
+ withcolor red
+ withpen pencircle xscaled 4mm yscaled 2mm rotated 30 ;
+ draw "test" infont defaultfont scaled 4 ;
+ verbatimtex \sl etex;
+ draw btex some more test etex scaled 2 ;
+ currentpicture := currentpicture shifted (0,1cm) ;
+ verbatimtex \bf etex;
+ draw btex another test etex scaled 2 ;
+ currentpicture := currentpicture shifted (0,1cm) ;
+ draw btex another test etex scaled 2 ;
+ draw
+ runscript("MpTest()")
+ withcolor green
+ withpen pencircle xscaled 2mm yscaled 1mm rotated 20 ;
+ endfig ;
+\endmplibcode
+
+\font\mine=file:luatex-fonts-demo-vf-1.lua at 12pt
+
+\mine \input tufte \par
+
+% \font\mine=file:luatex-fonts-demo-vf-2.lua at 12pt \mine [abab] \par
+% \font\mine=file:luatex-fonts-demo-vf-3.lua at 12pt \mine [abab] \par
+
+\font\test=dejavuserif:+kern at 10pt \test
+
+
+\bgroup \hsize 1mm \noindent Циолковский \par \egroup
+
+\loadpatterns{ru}
+
+\bgroup \hsize 1mm \noindent Циолковский \par \egroup
+
+a bit of math
+
+$\it e=mc^2 \bf e=mc^2 \Uchar"1D49D$
+
+$$\left( { {1} \over { {1} \over {x} } } \right) $$
+
+$$\sqrt {2} { { {1} \over { {1} \over {x} } } } $$
+
+\font\cows=file:koeieletters.afm at 50pt
+
+\cows Hello World!
+
+% math test
+
+\latinmodern
+
+\def\sqrt{\Uroot "0 "221A{}}
+
+\def\root#1\of{\Uroot "0 "221A{#1}}
+
+Inline $\sqrt{x}{1.2}$ math. % same for $\root n of x$
+
+$\root3\of x$
+
+$\sin{x}$
+
+\lucidabright
+
+\def\sqrt{\Uroot "0 "221A{}}
+
+\def\root#1\of{\Uroot "0 "221A{#1}}
+
+Inline $\sqrt{x}{1.2}$ math. % same for $\root n of x$
+
+$\root3\of x$
+
+$\sin{x}$
+
+\end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fontloader.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-tl2014.lua
index 12b68a57d9f..12b68a57d9f 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fontloader.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-tl2014.lua
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/fontloader-util-str.lua b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-util-str.lua
new file mode 100644
index 00000000000..95534c8d87f
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/fontloader-util-str.lua
@@ -0,0 +1,1134 @@
+if not modules then modules = { } end modules ['util-str'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+utilities = utilities or { }
+utilities.strings = utilities.strings or { }
+local strings = utilities.strings
+
+local format, gsub, rep, sub = string.format, string.gsub, string.rep, string.sub
+local load, dump = load, string.dump
+local tonumber, type, tostring = tonumber, type, tostring
+local unpack, concat = table.unpack, table.concat
+local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc
+local patterns, lpegmatch = lpeg.patterns, lpeg.match
+local utfchar, utfbyte = utf.char, utf.byte
+----- loadstripped = utilities.lua.loadstripped
+----- setmetatableindex = table.setmetatableindex
+
+local loadstripped = nil
+
+if _LUAVERSION < 5.2 then
+
+ loadstripped = function(str,shortcuts)
+ return load(str)
+ end
+
+else
+
+ loadstripped = function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
+
+end
+
+-- todo: make a special namespace for the formatter
+
+if not number then number = { } end -- temp hack for luatex-fonts
+
+local stripper = patterns.stripzeros
+local newline = patterns.newline
+local endofstring = patterns.endofstring
+local whitespace = patterns.whitespace
+local spacer = patterns.spacer
+local spaceortab = patterns.spaceortab
+
+local function points(n)
+ n = tonumber(n)
+ return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+
+local function basepoints(n)
+ n = tonumber(n)
+ return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536))
+end
+
+number.points = points
+number.basepoints = basepoints
+
+-- str = " \n \ntest \n test\ntest "
+-- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]")
+
+local rubish = spaceortab^0 * newline
+local anyrubish = spaceortab + newline
+local anything = patterns.anything
+local stripped = (spaceortab^1 / "") * newline
+local leading = rubish^0 / ""
+local trailing = (anyrubish^1 * endofstring) / ""
+local redundant = rubish^3 / "\n"
+
+local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0)
+
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+
+-- The following functions might end up in another namespace.
+
+local repeaters = { } -- watch how we also moved the -1 in depth-1 to the creator
+
+function strings.newrepeater(str,offset)
+ offset = offset or 0
+ local s = repeaters[str]
+ if not s then
+ s = { }
+ repeaters[str] = s
+ end
+ local t = s[offset]
+ if t then
+ return t
+ end
+ t = { }
+ setmetatable(t, { __index = function(t,k)
+ if not k then
+ return ""
+ end
+ local n = k + offset
+ local s = n > 0 and rep(str,n) or ""
+ t[k] = s
+ return s
+ end })
+ s[offset] = t
+ return t
+end
+
+-- local dashes = strings.newrepeater("--",-1)
+-- print(dashes[2],dashes[3],dashes[1])
+
+local extra, tab, start = 0, 0, 4
+
+local nspaces = strings.newrepeater(" ")
+
+string.nspaces = nspaces
+
+local pattern =
+ Carg(1) / function(t)
+ extra, tab, start = 0, t or 7, 1
+ end
+ * Cs((
+ Cp() * patterns.tab / function(position)
+ local current = (position - start + 1) + extra
+ local spaces = tab-(current-1) % tab
+ if spaces > 0 then
+ extra = extra + spaces - 1
+ return nspaces[spaces] -- rep(" ",spaces)
+ else
+ return ""
+ end
+ end
+ + newline * Cp() / function(position)
+ extra, start = 0, position
+ end
+ + patterns.anything
+ )^1)
+
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+
+-- local t = {
+-- "1234567123456712345671234567",
+-- "\tb\tc",
+-- "a\tb\tc",
+-- "aa\tbb\tcc",
+-- "aaa\tbbb\tccc",
+-- "aaaa\tbbbb\tcccc",
+-- "aaaaa\tbbbbb\tccccc",
+-- "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc",
+-- "one\n two\nxxx three\nxx four\nx five\nsix",
+-- }
+-- for k=1,#t do
+-- print(strings.tabtospace(t[k]))
+-- end
+
+-- todo: lpeg
+
+-- function strings.striplong(str) -- strips all leading spaces
+-- str = gsub(str,"^%s*","")
+-- str = gsub(str,"[\n\r]+ *","\n")
+-- return str
+-- end
+
+local space = spacer^0
+local nospace = space/""
+local endofline = nospace * newline
+
+local stripend = (whitespace^1 * endofstring)/""
+
+local normalline = (nospace * ((1-space*(newline+endofstring))^1) * nospace)
+
+local stripempty = endofline^1/""
+local normalempty = endofline^1
+local singleempty = endofline * (endofline^0/"")
+local doubleempty = endofline * endofline^-1 * (endofline^0/"")
+
+local stripstart = stripempty^0
+
+local p_prune_normal = Cs ( stripstart * ( stripend + normalline + normalempty )^0 )
+local p_prune_collapse = Cs ( stripstart * ( stripend + normalline + doubleempty )^0 )
+local p_prune_noempty = Cs ( stripstart * ( stripend + normalline + singleempty )^0 )
+local p_retain_normal = Cs ( ( normalline + normalempty )^0 )
+local p_retain_collapse = Cs ( ( normalline + doubleempty )^0 )
+local p_retain_noempty = Cs ( ( normalline + singleempty )^0 )
+
+-- function striplines(str,prune,collapse,noempty)
+-- if prune then
+-- if noempty then
+-- return lpegmatch(p_prune_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_prune_collapse,str) or str
+-- else
+-- return lpegmatch(p_prune_normal,str) or str
+-- end
+-- else
+-- if noempty then
+-- return lpegmatch(p_retain_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_retain_collapse,str) or str
+-- else
+-- return lpegmatch(p_retain_normal,str) or str
+-- end
+-- end
+-- end
+
+local striplinepatterns = {
+ ["prune"] = p_prune_normal,
+ ["prune and collapse"] = p_prune_collapse, -- default
+ ["prune and no empty"] = p_prune_noempty,
+ ["retain"] = p_retain_normal,
+ ["retain and collapse"] = p_retain_collapse,
+ ["retain and no empty"] = p_retain_noempty,
+ ["collapse"] = patterns.collapser, -- how about: stripper fullstripper
+}
+
+setmetatable(striplinepatterns,{ __index = function(t,k) return p_prune_collapse end })
+
+strings.striplinepatterns = striplinepatterns
+
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
+end
+
+-- also see: string.collapsespaces
+
+strings.striplong = strings.striplines -- for old times sake
+
+-- local str = table.concat( {
+-- " ",
+-- " aap",
+-- " noot mies",
+-- " ",
+-- " ",
+-- " zus wim jet",
+-- "zus wim jet",
+-- " zus wim jet",
+-- " ",
+-- }, "\n")
+
+-- local str = table.concat( {
+-- " aaaa",
+-- " bb",
+-- " cccccc",
+-- }, "\n")
+
+-- for k, v in table.sortedhash(utilities.strings.striplinepatterns) do
+-- logs.report("stripper","method: %s, result: [[%s]]",k,utilities.strings.striplines(str,k))
+-- end
+
+-- inspect(strings.striplong([[
+-- aaaa
+-- bb
+-- cccccc
+-- ]]))
+
+function strings.nice(str)
+ str = gsub(str,"[:%-+_]+"," ") -- maybe more
+ return str
+end
+
+-- Work in progress. Interestingly, compared to the built-in this is faster in
+-- luatex, while in luajittex the speed is comparable. It only makes sense
+-- to use the formatter when a (somewhat) complex format is used a lot. Each formatter
+-- is a function so there is some overhead and not all formatted output is worth that
+-- overhead. Keep in mind that there is an extra function call involved. In principle
+-- we end up with a string concatenation so one could inline such a sequence but often
+-- at the cost of less readability. So, it's a sort of (visual) compromise. Of course
+-- there is the benefit of more variants. (Concerning the speed: a simple format like
+-- %05fpt is better off with format than with a formatter, but as soon as you put
+-- something in front formatters become faster. Passing the pt as extra argument makes
+-- formatters behave better. Of course this is rather implementation dependent. Also,
+-- when a specific format is only used a few times the overhead in creating it is not
+-- compensated by speed.)
+--
+-- More info can be found in cld-mkiv.pdf so here I stick to a simple list.
+--
+-- integer %...i number
+-- integer %...d number
+-- unsigned %...u number
+-- character %...c number
+-- hexadecimal %...x number
+-- HEXADECIMAL %...X number
+-- octal %...o number
+-- string %...s string number
+-- float %...f number
+-- checked float %...F number
+-- exponential %...e number
+-- exponential %...E number
+-- autofloat %...g number
+-- autofloat %...G number
+-- utf character %...c number
+-- force tostring %...S any
+-- force tostring %Q any
+-- force tonumber %N number (strip leading zeros)
+-- signed number %I number
+-- rounded number %r number
+-- 0xhexadecimal %...h character number
+-- 0xHEXADECIMAL %...H character number
+-- U+hexadecimal %...u character number
+-- U+HEXADECIMAL %...U character number
+-- points %p number (scaled points)
+-- basepoints %b number (scaled points)
+-- table concat %...t table
+-- table concat %{.}t table
+-- serialize %...T sequenced (no nested tables)
+-- serialize %{.}T sequenced (no nested tables)
+-- boolean (logic) %l boolean
+-- BOOLEAN %L boolean
+-- whitespace %...w
+-- automatic %...a 'whatever' (string, table, ...)
+-- automatic %...A "whatever" (string, table, ...)
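+--
+-- A small usage sketch (added for illustration, not part of the upstream file);
+-- it assumes the default formatters instance created near the end of this module:
+--
+--   local formatters = string.formatters
+--
+--   print(formatters["%s of %s"](1,10))   -- 1 of 10   (plain %s, nil-safe)
+--   print(formatters["%p"](65536))        -- 1pt       (scaled points via number.points)
+--   print(formatters["%l"](true))         -- true      (boolean)
+--
+-- Each distinct format string is compiled once and then cached in the instance table.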
+
+local n = 0
+
+-- we are somewhat sloppy in parsing prefixes as it's not that critical
+
+-- hard to avoid but we can collect them in a private namespace if needed
+
+-- inlining the next two makes no sense as we only use this in logging
+
+local sequenced = table.sequenced
+
+function string.autodouble(s,sep)
+ if s == nil then
+ return '""'
+ end
+ local t = type(s)
+ if t == "number" then
+ return tostring(s) -- tostring not really needed
+ end
+ if t == "table" then
+ return ('"' .. sequenced(s,sep or ",") .. '"')
+ end
+ return ('"' .. tostring(s) .. '"')
+end
+
+function string.autosingle(s,sep)
+ if s == nil then
+ return "''"
+ end
+ local t = type(s)
+ if t == "number" then
+ return tostring(s) -- tostring not really needed
+ end
+ if t == "table" then
+ return ("'" .. sequenced(s,sep or ",") .. "'")
+ end
+ return ("'" .. tostring(s) .. "'")
+end
+
+local tracedchars = { [0] =
+ -- the regular bunch
+ "[null]", "[soh]", "[stx]", "[etx]", "[eot]", "[enq]", "[ack]", "[bel]",
+ "[bs]", "[ht]", "[lf]", "[vt]", "[ff]", "[cr]", "[so]", "[si]",
+ "[dle]", "[dc1]", "[dc2]", "[dc3]", "[dc4]", "[nak]", "[syn]", "[etb]",
+ "[can]", "[em]", "[sub]", "[esc]", "[fs]", "[gs]", "[rs]", "[us]",
+ -- plus space
+ "[space]", -- 0x20
+}
+
+string.tracedchars = tracedchars
+strings.tracers = tracedchars
+
+function string.tracedchar(b)
+ -- todo: table
+ if type(b) == "number" then
+ return tracedchars[b] or (utfchar(b) .. " (U+" .. format("%05X",b) .. ")")
+ else
+ local c = utfbyte(b)
+ return tracedchars[c] or (b .. " (U+" .. (c and format("%05X",c) or "?????") .. ")")
+ end
+end
+
+function number.signed(i)
+ if i > 0 then
+ return "+", i
+ else
+ return "-", -i
+ end
+end
+
+local zero = P("0")^1 / ""
+local plus = P("+") / ""
+local minus = P("-")
+local separator = S(".")
+local digit = R("09")
+local trailing = zero^1 * #S("eE")
+local exponent = (S("eE") * (plus + Cs((minus * zero^0 * P(-1))/"") + minus) * zero^0 * (P(-1) * Cc("0") + P(1)^1))
+local pattern_a = Cs(minus^0 * digit^1 * (separator/"" * trailing + separator * (trailing + digit)^0) * exponent)
+local pattern_b = Cs((exponent + P(1))^0)
+
+function number.sparseexponent(f,n)
+ if not n then
+ n = f
+ f = "%e"
+ end
+ local tn = type(n)
+ if tn == "string" then -- cast to number
+ local m = tonumber(n)
+ if m then
+ return lpegmatch((f == "%e" or f == "%E") and pattern_a or pattern_b,format(f,m))
+ end
+ elseif tn == "number" then
+ return lpegmatch((f == "%e" or f == "%E") and pattern_a or pattern_b,format(f,n))
+ end
+ return tostring(n)
+end
+
+local template = [[
+%s
+%s
+return function(%s) return %s end
+]]
+
+local preamble, environment = "", { }
+
+if _LUAVERSION < 5.2 then
+
+ preamble = [[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+
+else
+
+ environment = {
+ global = global or _G,
+ lpeg = lpeg,
+ type = type,
+ tostring = tostring,
+ tonumber = tonumber,
+ format = string.format,
+ concat = table.concat,
+ signed = number.signed,
+ points = number.points,
+ basepoints = number.basepoints,
+ utfchar = utf.char,
+ utfbyte = utf.byte,
+ lpegmatch = lpeg.match,
+ nspaces = string.nspaces,
+ tracedchar = string.tracedchar,
+ autosingle = string.autosingle,
+ autodouble = string.autodouble,
+ sequenced = table.sequenced,
+ formattednumber = number.formatted,
+ sparseexponent = number.sparseexponent,
+ }
+
+end
+
+-- -- --
+
+local arguments = { "a1" } -- faster than previously used (select(n,...))
+
+setmetatable(arguments, { __index =
+ function(t,k)
+ local v = t[k-1] .. ",a" .. k
+ t[k] = v
+ return v
+ end
+})
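+
+-- so, for instance, arguments[3] lazily expands to the string "a1,a2,a3"
+-- (illustration added here, not part of the upstream file)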
+
+local prefix_any = C((S("+- .") + R("09"))^0)
+local prefix_tab = P("{") * C((1-P("}"))^0) * P("}") + C((1-R("az","AZ","09","%%"))^0)
+
+-- we've split all cases as then we can optimize them (let's omit the fuzzy u)
+
+-- todo: replace outer formats in next by ..
+
+local format_s = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%ss',a%s)",f,n)
+ else -- best no tostring in order to stay compatible (.. does a selective tostring too)
+ return format("(a%s or '')",n) -- goodie: nil check
+ end
+end
+
+local format_S = function(f) -- can be optimized
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+
+local format_q = function()
+ n = n + 1
+ return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster)
+end
+
+local format_Q = function() -- can be optimized
+ n = n + 1
+ return format("format('%%q',tostring(a%s))",n)
+end
+
+local format_i = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("format('%%i',a%s)",n) -- why not just tostring()
+ end
+end
+
+local format_d = format_i
+
+local format_I = function(f)
+ n = n + 1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+
+local format_f = function(f)
+ n = n + 1
+ return format("format('%%%sf',a%s)",f,n)
+end
+
+-- The next one formats an integer as integer and very small values as zero. This is needed
+-- for pdf backend code.
+--
+-- 1.23 % 1 : 0.23
+-- - 1.23 % 1 : 0.77
+--
+-- We could probably use just %s with integers but who knows what Lua 5.3 will do? So let's
+-- for the moment use %i.
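+--
+-- A rough illustration of the intended behaviour (added as a sketch, not upstream),
+-- assuming the default formatters instance defined further down:
+--
+--   formatters["%F"](2)       -- 2             (integers stay integers)
+--   formatters["%F"](1e-12)   -- 0             (very small values collapse to zero)
+--   formatters["%F"](1.5)     -- 1.500000000   (otherwise %.9f)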
+
+local format_F = function(f) -- beware, no cast to number
+ n = n + 1
+ if not f or f == "" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
+
+local format_g = function(f)
+ n = n + 1
+ return format("format('%%%sg',a%s)",f,n)
+end
+
+local format_G = function(f)
+ n = n + 1
+ return format("format('%%%sG',a%s)",f,n)
+end
+
+local format_e = function(f)
+ n = n + 1
+ return format("format('%%%se',a%s)",f,n)
+end
+
+local format_E = function(f)
+ n = n + 1
+ return format("format('%%%sE',a%s)",f,n)
+end
+
+local format_j = function(f)
+ n = n + 1
+ return format("sparseexponent('%%%se',a%s)",f,n)
+end
+
+local format_J = function(f)
+ n = n + 1
+ return format("sparseexponent('%%%sE',a%s)",f,n)
+end
+
+local format_x = function(f)
+ n = n + 1
+ return format("format('%%%sx',a%s)",f,n)
+end
+
+local format_X = function(f)
+ n = n + 1
+ return format("format('%%%sX',a%s)",f,n)
+end
+
+local format_o = function(f)
+ n = n + 1
+ return format("format('%%%so',a%s)",f,n)
+end
+
+local format_c = function()
+ n = n + 1
+ return format("utfchar(a%s)",n)
+end
+
+local format_C = function()
+ n = n + 1
+ return format("tracedchar(a%s)",n)
+end
+
+local format_r = function(f)
+ n = n + 1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+
+local format_h = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_H = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_u = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_U = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_p = function()
+ n = n + 1
+ return format("points(a%s)",n)
+end
+
+local format_b = function()
+ n = n + 1
+ return format("basepoints(a%s)",n)
+end
+
+local format_t = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+
+local format_T = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+
+local format_l = function()
+ n = n + 1
+ return format("(a%s and 'true' or 'false')",n)
+end
+
+local format_L = function()
+ n = n + 1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+
+local format_N = function() -- strips leading zeros
+ n = n + 1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+
+local format_a = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+
+local format_A = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+
+local format_w = function(f) -- handy when doing depth related indent
+ n = n + 1
+ f = tonumber(f)
+ if f then -- not that useful
+ return format("nspaces[%s+a%s]",f,n) -- no real need for tonumber
+ else
+ return format("nspaces[a%s]",n) -- no real need for tonumber
+ end
+end
+
+local format_W = function(f) -- handy when doing depth related indent
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+
+-- maybe to util-num
+
+local digit = patterns.digit
+local period = patterns.period
+local three = digit * digit * digit
+
+local splitter = Cs (
+ (((1 - (three^1 * period))^1 + C(three)) * (Carg(1) * three)^1 + C((1-period)^1))
+ * (P(1)/"" * Carg(2)) * C(2)
+)
+
+patterns.formattednumber = splitter
+
+function number.formatted(n,sep1,sep2)
+ local s = type(s) == "string" and n or format("%0.2f",n)
+ if sep1 == true then
+ return lpegmatch(splitter,s,1,".",",")
+ elseif sep1 == "." then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ",")
+ elseif sep1 == "," then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ".")
+ else
+ return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
+ end
+end
+
+-- print(number.formatted(1))
+-- print(number.formatted(12))
+-- print(number.formatted(123))
+-- print(number.formatted(1234))
+-- print(number.formatted(12345))
+-- print(number.formatted(123456))
+-- print(number.formatted(1234567))
+-- print(number.formatted(12345678))
+-- print(number.formatted(12345678,true))
+-- print(number.formatted(1234.56,"!","?"))
+
+local format_m = function(f)
+ n = n + 1
+ if not f or f == "" then
+ f = ","
+ end
+ return format([[formattednumber(a%s,%q,".")]],n,f)
+end
+
+local format_M = function(f)
+ n = n + 1
+ if not f or f == "" then
+ f = "."
+ end
+ return format([[formattednumber(a%s,%q,",")]],n,f)
+end
+
+--
+
+local format_z = function(f)
+ n = n + (tonumber(f) or 1)
+ return "''" -- okay, not that efficient to append '' but a special case anyway
+end
+
+--
+
+local format_rest = function(s)
+ return format("%q",s) -- catches " and \n and such
+end
+
+local format_extension = function(extensions,f,name)
+ local extension = extensions[name] or "tostring(%s)"
+ local f = tonumber(f) or 1
+ if f == 0 then
+ return extension
+ elseif f == 1 then
+ n = n + 1
+ local a = "a" .. n
+ return format(extension,a,a) -- maybe more times?
+ elseif f < 0 then
+ local a = "a" .. (n + f + 1)
+ return format(extension,a,a)
+ else
+ local t = { }
+ for i=1,f do
+ n = n + 1
+ t[#t+1] = "a" .. n
+ end
+ return format(extension,unpack(t))
+ end
+end
+
+-- aA b cC d eE f gG hH iI jJ lL mM N o p qQ r sS tT uU wW xX z
+
+local builder = Cs { "start",
+ start = (
+ (
+ P("%") / ""
+ * (
+ V("!") -- new
+ + V("s") + V("q")
+ + V("i") + V("d")
+ + V("f") + V("F") + V("g") + V("G") + V("e") + V("E")
+ + V("x") + V("X") + V("o")
+ --
+ + V("c")
+ + V("C")
+ + V("S") -- new
+ + V("Q") -- new
+ + V("N") -- new
+ --
+ + V("r")
+ + V("h") + V("H") + V("u") + V("U")
+ + V("p") + V("b")
+ + V("t") + V("T")
+ + V("l") + V("L")
+ + V("I")
+ + V("w") -- new
+ + V("W") -- new
+ + V("a") -- new
+ + V("A") -- new
+ + V("j") + V("J") -- stripped e E
+ + V("m") + V("M") -- new
+ + V("z") -- new
+ --
+ -- + V("?") -- ignores probably messed up %
+ )
+ + V("*")
+ )
+ * (P(-1) + Carg(1))
+ )^0,
+ --
+ ["s"] = (prefix_any * P("s")) / format_s, -- %s => regular %s (string)
+ ["q"] = (prefix_any * P("q")) / format_q, -- %q => regular %q (quoted string)
+ ["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer)
+ ["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer)
+ ["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float)
+ ["F"] = (prefix_any * P("F")) / format_F, -- %F => regular %f (float) but 0/1 check
+ ["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float)
+ ["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float)
+ ["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float)
+ ["E"] = (prefix_any * P("E")) / format_E, -- %E => regular %E (float)
+ ["x"] = (prefix_any * P("x")) / format_x, -- %x => regular %x (hexadecimal)
+ ["X"] = (prefix_any * P("X")) / format_X, -- %X => regular %X (HEXADECIMAL)
+ ["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal)
+ --
+ ["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring)
+ ["Q"] = (prefix_any * P("Q")) / format_S, -- %Q => %q (tostring)
+ ["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros)
+ ["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular)
+ ["C"] = (prefix_any * P("C")) / format_C, -- %c => U+.... utf character
+ --
+ ["r"] = (prefix_any * P("r")) / format_r, -- %r => round
+ ["h"] = (prefix_any * P("h")) / format_h, -- %h => 0x0a1b2 (when - no 0x) was v
+ ["H"] = (prefix_any * P("H")) / format_H, -- %H => 0x0A1B2 (when - no 0x) was V
+ ["u"] = (prefix_any * P("u")) / format_u, -- %u => u+0a1b2 (when - no u+)
+ ["U"] = (prefix_any * P("U")) / format_U, -- %U => U+0A1B2 (when - no U+)
+ ["p"] = (prefix_any * P("p")) / format_p, -- %p => 12.345pt / maybe: P (and more units)
+ ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units)
+ ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat
+ ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced
+ ["l"] = (prefix_any * P("l")) / format_l, -- %l => boolean
+ ["L"] = (prefix_any * P("L")) / format_L, -- %L => BOOLEAN
+ ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer
+ --
+ ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added)
+ ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier
+ --
+ ["j"] = (prefix_any * P("j")) / format_j, -- %j => %e (float) stripped exponent (irrational)
+ ["J"] = (prefix_any * P("J")) / format_J, -- %J => %E (float) stripped exponent (irrational)
+ --
+ ["m"] = (prefix_tab * P("m")) / format_m, -- %m => xxx.xxx.xxx,xx (optional prefix instead of .)
+ ["M"] = (prefix_tab * P("M")) / format_M, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,)
+ --
+ ["z"] = (prefix_any * P("z")) / format_z, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,)
+ --
+ ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring)
+ ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
+ --
+ ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%")^1) / format_rest, -- rest (including %%)
+ ["?"] = Cs(((1-P("%"))^1 )^1) / format_rest, -- rest (including %%)
+ --
+ ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension,
+}
+
+-- we can be clever and only alias what is needed
+
+-- local direct = Cs (
+-- P("%")/""
+-- * Cc([[local format = string.format return function(str) return format("%]])
+-- * (S("+- .") + R("09"))^0
+-- * S("sqidfgGeExXo")
+-- * Cc([[",str) end]])
+-- * P(-1)
+-- )
+
+local direct = Cs (
+ P("%")
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * P(-1) / [[local format = string.format return function(str) return format("%0",str) end]]
+)
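+
+-- a small illustration of the direct path: for the plain format string "%s" the
+-- pattern above yields the chunk below (the %0 in the replacement string stands
+-- for the whole match):
+--
+--   local format = string.format return function(str) return format("%s",str) end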
+
+local function make(t,str)
+    local f
+    local p = lpegmatch(direct,str)
+ if p then
+ -- f = loadstripped(p)()
+ -- print("builder 1 >",p)
+ f = loadstripped(p)()
+ else
+ n = 0
+ -- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ p = lpegmatch(builder,str,1,t._connector_,t._extensions_) -- after this we know n
+ if n > 0 then
+ p = format(template,preamble,t._preamble_,arguments[n],p)
+ -- print("builder 2 >",p)
+ f = loadstripped(p,t._environment_)() -- t._environment is not populated (was experiment)
+ else
+ f = function() return str end
+ end
+ end
+ t[str] = f
+ return f
+end
+
+-- -- collect periodically
+--
+-- local threshold = 1000 -- max nof cached formats
+--
+-- local function make(t,str)
+-- local f = rawget(t,str)
+-- if f then
+-- return f
+-- end
+-- local parent = t._t_
+-- if parent._n_ > threshold then
+-- local m = { _t_ = parent }
+-- getmetatable(parent).__index = m
+-- setmetatable(m, { __index = make })
+-- else
+-- parent._n_ = parent._n_ + 1
+-- end
+-- local f
+-- local p = lpegmatch(direct,str)
+-- if p then
+-- f = loadstripped(p)()
+-- else
+-- n = 0
+-- p = lpegmatch(builder,str,1,"..",parent._extensions_) -- after this we know n
+-- if n > 0 then
+-- p = format(template,preamble,parent._preamble_,arguments[n],p)
+-- -- print("builder>",p)
+-- f = loadstripped(p)()
+-- else
+-- f = function() return str end
+-- end
+-- end
+-- t[str] = f
+-- return f
+-- end
+
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+
+strings.formatters = { }
+
+-- we cannot make these tables weak, unless we start using an indirect
+-- table (metatable), in which case we had better keep a count and
+-- clear that table when a threshold is reached
+
+-- _connector_ is an experiment
+
+if _LUAVERSION < 5.2 then
+
+ function strings.formatters.new(noconcat)
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = preamble, _environment_ = { } }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
+else
+
+ function strings.formatters.new(noconcat)
+ local e = { } -- better make a copy as we can overload
+ for k, v in next, environment do
+ e[k] = v
+ end
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = "", _environment_ = e }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
+end
+
+-- function strings.formatters.new()
+-- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter", _n_ = 0 }
+-- local m = { _t_ = t }
+-- setmetatable(t, { __index = m, __call = use })
+-- setmetatable(m, { __index = make })
+-- return t
+-- end
+
+local formatters = strings.formatters.new() -- the default instance
+
+string.formatters = formatters -- in the main string namespace
+string.formatter = function(str,...) return formatters[str](...) end -- sometimes nicer name
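+
+-- usage sketch, assuming the module is loaded as above; formatter functions are
+-- built lazily per format string via the __index/__call metamethods:
+--
+--   local fmt = string.formatters
+--   print(fmt["%s => %q (%0.3f)"]("key", "value", 1.5))  -- builder path
+--   print(string.formatter("%s", "plain"))               -- direct path (single plain specifier)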
+
+local function add(t,name,template,preamble)
+ if type(t) == "table" and t._type_ == "formatter" then
+ t._extensions_[name] = template or "%s"
+ if type(preamble) == "string" then
+ t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
+ elseif type(preamble) == "table" then
+ for k, v in next, preamble do
+ t._environment_[k] = v
+ end
+ end
+ end
+end
+
+strings.formatters.add = add
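+
+-- usage sketch for the %!...! extension mechanism; "upper" is a hypothetical
+-- extension name and the preamble table matches the Lua >= 5.2 branch below
+-- (on older Luas a preamble string would be passed instead):
+--
+--   local f = strings.formatters.new()
+--   strings.formatters.add(f, "upper", [[upper(%s)]], { upper = string.upper })
+--   print(f["shout: %!upper!"]("hello"))  -- expected: shout: HELLO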
+
+-- registered in the default instance (should we fall back on this one?)
+
+patterns.xmlescape = Cs((P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;" + P('"')/"&quot;" + P(1))^0)
+patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0)
+patterns.luaescape = Cs(((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0) -- maybe also \0
+patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0 * Cc('"'))
+
+-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but
+-- faster again when other q-escapables are found (the ones we don't need to escape)
+
+-- add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+-- add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+-- add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+
+if _LUAVERSION < 5.2 then
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+
+else
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape = lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape = lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape = lpeg.patterns.luaescape })
+
+end
+
+-- -- yes or no:
+--
+-- local function make(t,str)
+-- local f
+-- local p = lpegmatch(direct,str)
+-- if p then
+-- f = loadstripped(p)()
+-- else
+-- n = 0
+-- p = lpegmatch(builder,str,1,",") -- after this we know n
+-- if n > 0 then
+-- p = format(template,template_shortcuts,arguments[n],p)
+-- f = loadstripped(p)()
+-- else
+-- f = function() return str end
+-- end
+-- end
+-- t[str] = f
+-- return f
+-- end
+--
+-- local formatteds = string.formatteds or { }
+-- string.formatteds = formatteds
+--
+-- setmetatable(formatteds, { __index = make, __call = use })
+
+-- This is a somewhat silly one used in commandline reconstruction, but the older
+-- method, using a combination of find, gsub, quoted and unquoted, was not that
+-- reliable.
+--
+-- '"foo"bar \"and " whatever"' => "foo\"bar \"and \" whatever"
+-- 'foo"bar \"and " whatever' => "foo\"bar \"and \" whatever"
+
+local dquote = patterns.dquote -- P('"')
+local equote = patterns.escaped + dquote / '\\"' + 1
+local space = patterns.space
+local cquote = Cc('"')
+
+local pattern =
+ Cs(dquote * (equote - P(-2))^0 * dquote) -- we keep the outer but escape unescaped ones
+ + Cs(cquote * (equote - space)^0 * space * equote^0 * cquote) -- we escape unescaped ones
+
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
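+
+-- usage sketch, mirroring the two examples quoted above:
+--
+--   print(string.optionalquoted([["foo"bar \"and " whatever"]]))  -- "foo\"bar \"and \" whatever"
+--   print(string.optionalquoted([[foo"bar \"and " whatever]]))    -- "foo\"bar \"and \" whatever"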
+
+local pattern = Cs((newline / (os.newline or "\r") + 1)^0)
+
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua
index 89bf51b4d83..c50e0cdb647 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua
@@ -2,10 +2,8 @@
-----------------------------------------------------------------------
-- FILE: luaotfload-auxiliary.lua
-- DESCRIPTION: part of luaotfload
--- REQUIREMENTS: luaotfload 2.5
+-- REQUIREMENTS: luaotfload 2.6
-- AUTHOR: Khaled Hosny, Élie Roux, Philipp Gesang
--- VERSION: 2.5
--- MODIFIED: 2014-01-02 21:24:25+0100
-----------------------------------------------------------------------
--
@@ -19,6 +17,7 @@ local aux = luaotfload.aux
local log = luaotfload.log
local report = log.report
local fonthashes = fonts.hashes
+local encodings = fonts.encodings
local identifiers = fonthashes.identifiers
local fontnames = fonts.names
@@ -64,7 +63,7 @@ aux.start_rewrite_fontname = start_rewrite_fontname
local stop_rewrite_fontname = function ()
if rewriting == true then
- luatexbase.remove_fromt_callback
+ luatexbase.remove_from_callback
("luaotfload.patch_font", "luaotfload.rewrite_fontname")
rewriting = false
report ("log", 1, "aux",
@@ -129,6 +128,35 @@ luatexbase.add_to_callback(
"luaotfload.patch_font",
patch_cambria_domh,
"luaotfload.aux.patch_cambria_domh")
+
+
+--[[doc--
+
+  Add a missing field to fonts that lack it. Addresses issue
+ https://github.com/lualatex/luaotfload/issues/253
+
+ This is considered a hack, especially since importing the
+ unicode-math package fixes the problem quite nicely.
+
+--doc]]--
+
+--- fontobj -> unit
+local fixup_fontdata = function (data)
+
+ local t = type (data)
+ --- Some OT fonts like Libertine R lack the resources table, causing
+ --- the fontloader to nil-index.
+ if t == "table" then
+ if data and not data.resources then data.resources = { } end
+ end
+
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font_unsafe",
+ fixup_fontdata,
+ "luaotfload.aux.fixup_fontdata")
+
--[[doc--
@@ -185,8 +213,6 @@ luatexbase.add_to_callback(
--- glyphs and characters
-----------------------------------------------------------------------
-local agl = fonts.encodings.agl
-
--- int -> int -> bool
local font_has_glyph = function (font_id, codepoint)
local fontdata = fonts.hashes.identifiers[font_id]
@@ -203,7 +229,7 @@ aux.font_has_glyph = font_has_glyph
local raw_slot_of_name = function (font_id, glyphname)
local fontdata = font.fonts[font_id]
if fontdata.type == "virtual" then --- get base font for glyph idx
- local codepoint = agl.unicodes[glyphname]
+ local codepoint = encodings.agl.unicodes[glyphname]
local glyph = fontdata.characters[codepoint]
if fontdata.characters[codepoint] then
return codepoint
@@ -264,7 +290,7 @@ local indices
--- int -> (string | false)
local name_of_slot = function (codepoint)
if not indices then --- this will load the glyph list
- local unicodes = agl.unicodes
+ local unicodes = encodings.agl.unicodes
indices = table.swapped(unicodes)
end
local glyphname = indices[codepoint]
@@ -417,25 +443,25 @@ least one feature.
local provides_script = function (font_id, asked_script)
asked_script = stringlower(asked_script)
if font_id and font_id > 0 then
- local fontdata = identifiers[font_id].shared.rawdata
- if fontdata then
- local fontname = fontdata.metadata.fontname
- local features = fontdata.resources.features
- for method, featuredata in next, features do
- --- where method: "gpos" | "gsub"
- for feature, data in next, featuredata do
- if data[asked_script] then
- report ("log", 1, "aux",
- "font no %d (%s) defines feature %s for script %s",
- font_id, fontname, feature, asked_script)
- return true
- end
+ local tfmdata = identifiers[font_id] if not tfmdata then return false end
+ local shared = tfmdata.shared if not shared then return false end
+ local fontdata = shared.rawdata if not fontdata then return false end
+ local fontname = fontdata.metadata.fontname
+ local features = fontdata.resources.features
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ for feature, data in next, featuredata do
+ if data[asked_script] then
+ report ("log", 1, "aux",
+ "font no %d (%s) defines feature %s for script %s",
+ font_id, fontname, feature, asked_script)
+ return true
end
end
- report ("log", 0, "aux",
- "font no %d (%s) defines no feature for script %s",
- font_id, fontname, asked_script)
end
+ report ("log", 0, "aux",
+ "font no %d (%s) defines no feature for script %s",
+ font_id, fontname, asked_script)
end
report ("log", 0, "aux", "no font with id %d", font_id)
return false
@@ -455,29 +481,29 @@ local provides_language = function (font_id, asked_script, asked_language)
asked_script = stringlower(asked_script)
asked_language = stringlower(asked_language)
if font_id and font_id > 0 then
- local fontdata = identifiers[font_id].shared.rawdata
- if fontdata then
- local fontname = fontdata.metadata.fontname
- local features = fontdata.resources.features
- for method, featuredata in next, features do
- --- where method: "gpos" | "gsub"
- for feature, data in next, featuredata do
- local scriptdata = data[asked_script]
- if scriptdata and scriptdata[asked_language] then
- report ("log", 1, "aux",
- "font no %d (%s) defines feature %s "
- .. "for script %s with language %s",
- font_id, fontname, feature,
- asked_script, asked_language)
- return true
- end
+ local tfmdata = identifiers[font_id] if not tfmdata then return false end
+ local shared = tfmdata.shared if not shared then return false end
+ local fontdata = shared.rawdata if not fontdata then return false end
+ local fontname = fontdata.metadata.fontname
+ local features = fontdata.resources.features
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ for feature, data in next, featuredata do
+ local scriptdata = data[asked_script]
+ if scriptdata and scriptdata[asked_language] then
+ report ("log", 1, "aux",
+ "font no %d (%s) defines feature %s "
+ .. "for script %s with language %s",
+ font_id, fontname, feature,
+ asked_script, asked_language)
+ return true
end
end
- report ("log", 0, "aux",
- "font no %d (%s) defines no feature "
- .. "for script %s with language %s",
- font_id, fontname, asked_script, asked_language)
end
+ report ("log", 0, "aux",
+ "font no %d (%s) defines no feature "
+ .. "for script %s with language %s",
+ font_id, fontname, asked_script, asked_language)
end
report ("log", 0, "aux", "no font with id %d", font_id)
return false
@@ -527,29 +553,29 @@ local provides_feature = function (font_id, asked_script,
asked_feature = lpegmatch(strip_garbage, asked_feature)
if font_id and font_id > 0 then
- local fontdata = identifiers[font_id].shared.rawdata
- if fontdata then
- local features = fontdata.resources.features
- local fontname = fontdata.metadata.fontname
- for method, featuredata in next, features do
- --- where method: "gpos" | "gsub"
- local feature = featuredata[asked_feature]
- if feature then
- local scriptdata = feature[asked_script]
- if scriptdata and scriptdata[asked_language] then
- report ("log", 1, "aux",
- "font no %d (%s) defines feature %s "
- .. "for script %s with language %s",
- font_id, fontname, asked_feature,
- asked_script, asked_language)
- return true
- end
+ local tfmdata = identifiers[font_id] if not tfmdata then return false end
+ local shared = tfmdata.shared if not shared then return false end
+ local fontdata = shared.rawdata if not fontdata then return false end
+ local features = fontdata.resources.features
+ local fontname = fontdata.metadata.fontname
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ local feature = featuredata[asked_feature]
+ if feature then
+ local scriptdata = feature[asked_script]
+ if scriptdata and scriptdata[asked_language] then
+ report ("log", 1, "aux",
+ "font no %d (%s) defines feature %s "
+ .. "for script %s with language %s",
+ font_id, fontname, asked_feature,
+ asked_script, asked_language)
+ return true
end
end
- report ("log", 0, "aux",
- "font no %d (%s) does not define feature %s for script %s with language %s",
- font_id, fontname, asked_feature, asked_script, asked_language)
end
+ report ("log", 0, "aux",
+ "font no %d (%s) does not define feature %s for script %s with language %s",
+ font_id, fontname, asked_feature, asked_script, asked_language)
end
report ("log", 0, "aux", "no font with id %d", font_id)
return false
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-characters.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-characters.lua
index 680a8aab8ca..ebb6eb460da 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-characters.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-characters.lua
@@ -8391,6 +8391,18 @@ return {
category="lo",
direction="al",
},
+ [0x8B3]={
+ category="lo",
+ direction="al",
+ },
+ [0x8B4]={
+ category="lo",
+ direction="al",
+ },
+ [0x8E3]={
+ category="mn",
+ direction="nsm",
+ },
[0x8E4]={
category="mn",
direction="nsm",
@@ -10039,6 +10051,10 @@ return {
category="sc",
direction="et",
},
+ [0xAF9]={
+ category="lo",
+ direction="l",
+ },
[0xB01]={
category="mn",
direction="nsm",
@@ -10979,6 +10995,10 @@ return {
category="lo",
direction="l",
},
+ [0xC5A]={
+ category="lo",
+ direction="l",
+ },
[0xC60]={
category="lo",
direction="l",
@@ -11703,6 +11723,10 @@ return {
category="mc",
direction="l",
},
+ [0xD5F]={
+ category="lo",
+ direction="l",
+ },
[0xD60]={
category="lo",
direction="l",
@@ -17527,6 +17551,34 @@ return {
category="lo",
direction="l",
},
+ [0x13F5]={
+ category="lu",
+ direction="l",
+ },
+ [0x13F8]={
+ category="ll",
+ direction="l",
+ },
+ [0x13F9]={
+ category="ll",
+ direction="l",
+ },
+ [0x13FA]={
+ category="ll",
+ direction="l",
+ },
+ [0x13FB]={
+ category="ll",
+ direction="l",
+ },
+ [0x13FC]={
+ category="ll",
+ direction="l",
+ },
+ [0x13FD]={
+ category="ll",
+ direction="l",
+ },
[0x1400]={
category="pd",
direction="on",
@@ -29155,6 +29207,10 @@ return {
category="sc",
direction="et",
},
+ [0x20BE]={
+ category="sc",
+ direction="et",
+ },
[0x20D0]={
category="mn",
direction="nsm",
@@ -29839,6 +29895,14 @@ return {
category="no",
direction="on",
},
+ [0x218A]={
+ category="so",
+ direction="on",
+ },
+ [0x218B]={
+ category="so",
+ direction="on",
+ },
[0x2190]={
category="sm",
direction="on",
@@ -40389,6 +40453,22 @@ return {
category="so",
direction="on",
},
+ [0x2BEC]={
+ category="so",
+ direction="on",
+ },
+ [0x2BED]={
+ category="so",
+ direction="on",
+ },
+ [0x2BEE]={
+ category="so",
+ direction="on",
+ },
+ [0x2BEF]={
+ category="so",
+ direction="on",
+ },
[0x2C00]={
category="lu",
direction="l",
@@ -54771,6 +54851,10 @@ return {
category="lm",
direction="l",
},
+ [0xA69E]={
+ category="mn",
+ direction="nsm",
+ },
[0xA69F]={
category="mn",
direction="nsm",
@@ -55699,6 +55783,10 @@ return {
category="ll",
direction="l",
},
+ [0xA78F]={
+ category="lo",
+ direction="l",
+ },
[0xA790]={
category="lu",
direction="l",
@@ -55827,6 +55915,30 @@ return {
category="lu",
direction="l",
},
+ [0xA7B2]={
+ category="lu",
+ direction="l",
+ },
+ [0xA7B3]={
+ category="lu",
+ direction="l",
+ },
+ [0xA7B4]={
+ category="lu",
+ direction="l",
+ },
+ [0xA7B5]={
+ category="ll",
+ direction="l",
+ },
+ [0xA7B6]={
+ category="lu",
+ direction="l",
+ },
+ [0xA7B7]={
+ category="ll",
+ direction="l",
+ },
[0xA7F7]={
category="lo",
direction="l",
@@ -56739,6 +56851,14 @@ return {
category="lo",
direction="l",
},
+ [0xA8FC]={
+ category="po",
+ direction="l",
+ },
+ [0xA8FD]={
+ category="lo",
+ direction="l",
+ },
[0xA900]={
category="nd",
direction="l",
@@ -58843,6 +58963,22 @@ return {
category="lm",
direction="l",
},
+ [0xAB60]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB61]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB62]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB63]={
+ category="ll",
+ direction="l",
+ },
[0xAB64]={
category="ll",
direction="l",
@@ -58851,6 +58987,326 @@ return {
category="ll",
direction="l",
},
+ [0xAB70]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB71]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB72]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB73]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB74]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB75]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB76]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB77]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB78]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB79]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB7A]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB7B]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB7C]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB7D]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB7E]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB7F]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB80]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB81]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB82]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB83]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB84]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB85]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB86]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB87]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB88]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB89]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB8A]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB8B]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB8C]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB8D]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB8E]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB8F]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB90]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB91]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB92]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB93]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB94]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB95]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB96]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB97]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB98]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB99]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB9A]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB9B]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB9C]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB9D]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB9E]={
+ category="ll",
+ direction="l",
+ },
+ [0xAB9F]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA0]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA1]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA2]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA3]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA4]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA5]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA6]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA7]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA8]={
+ category="ll",
+ direction="l",
+ },
+ [0xABA9]={
+ category="ll",
+ direction="l",
+ },
+ [0xABAA]={
+ category="ll",
+ direction="l",
+ },
+ [0xABAB]={
+ category="ll",
+ direction="l",
+ },
+ [0xABAC]={
+ category="ll",
+ direction="l",
+ },
+ [0xABAD]={
+ category="ll",
+ direction="l",
+ },
+ [0xABAE]={
+ category="ll",
+ direction="l",
+ },
+ [0xABAF]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB0]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB1]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB2]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB3]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB4]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB5]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB6]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB7]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB8]={
+ category="ll",
+ direction="l",
+ },
+ [0xABB9]={
+ category="ll",
+ direction="l",
+ },
+ [0xABBA]={
+ category="ll",
+ direction="l",
+ },
+ [0xABBB]={
+ category="ll",
+ direction="l",
+ },
+ [0xABBC]={
+ category="ll",
+ direction="l",
+ },
+ [0xABBD]={
+ category="ll",
+ direction="l",
+ },
+ [0xABBE]={
+ category="ll",
+ direction="l",
+ },
+ [0xABBF]={
+ category="ll",
+ direction="l",
+ },
[0xABC0]={
category="lo",
direction="l",
@@ -64119,6 +64575,14 @@ return {
category="mn",
direction="nsm",
},
+ [0xFE2E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0xFE2F]={
+ category="mn",
+ direction="nsm",
+ },
[0xFE30]={
category="po",
direction="on",
@@ -71703,6 +72167,110 @@ return {
category="no",
direction="r",
},
+ [0x108E0]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E1]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E2]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E3]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E4]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E5]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E6]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E7]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E8]={
+ category="lo",
+ direction="r",
+ },
+ [0x108E9]={
+ category="lo",
+ direction="r",
+ },
+ [0x108EA]={
+ category="lo",
+ direction="r",
+ },
+ [0x108EB]={
+ category="lo",
+ direction="r",
+ },
+ [0x108EC]={
+ category="lo",
+ direction="r",
+ },
+ [0x108ED]={
+ category="lo",
+ direction="r",
+ },
+ [0x108EE]={
+ category="lo",
+ direction="r",
+ },
+ [0x108EF]={
+ category="lo",
+ direction="r",
+ },
+ [0x108F0]={
+ category="lo",
+ direction="r",
+ },
+ [0x108F1]={
+ category="lo",
+ direction="r",
+ },
+ [0x108F2]={
+ category="lo",
+ direction="r",
+ },
+ [0x108F4]={
+ category="lo",
+ direction="r",
+ },
+ [0x108F5]={
+ category="lo",
+ direction="r",
+ },
+ [0x108FB]={
+ category="no",
+ direction="r",
+ },
+ [0x108FC]={
+ category="no",
+ direction="r",
+ },
+ [0x108FD]={
+ category="no",
+ direction="r",
+ },
+ [0x108FE]={
+ category="no",
+ direction="r",
+ },
+ [0x108FF]={
+ category="no",
+ direction="r",
+ },
[0x10900]={
category="lo",
direction="r",
@@ -72151,6 +72719,14 @@ return {
category="lo",
direction="r",
},
+ [0x109BC]={
+ category="no",
+ direction="r",
+ },
+ [0x109BD]={
+ category="no",
+ direction="r",
+ },
[0x109BE]={
category="lo",
direction="r",
@@ -72159,6 +72735,254 @@ return {
category="lo",
direction="r",
},
+ [0x109C0]={
+ category="no",
+ direction="r",
+ },
+ [0x109C1]={
+ category="no",
+ direction="r",
+ },
+ [0x109C2]={
+ category="no",
+ direction="r",
+ },
+ [0x109C3]={
+ category="no",
+ direction="r",
+ },
+ [0x109C4]={
+ category="no",
+ direction="r",
+ },
+ [0x109C5]={
+ category="no",
+ direction="r",
+ },
+ [0x109C6]={
+ category="no",
+ direction="r",
+ },
+ [0x109C7]={
+ category="no",
+ direction="r",
+ },
+ [0x109C8]={
+ category="no",
+ direction="r",
+ },
+ [0x109C9]={
+ category="no",
+ direction="r",
+ },
+ [0x109CA]={
+ category="no",
+ direction="r",
+ },
+ [0x109CB]={
+ category="no",
+ direction="r",
+ },
+ [0x109CC]={
+ category="no",
+ direction="r",
+ },
+ [0x109CD]={
+ category="no",
+ direction="r",
+ },
+ [0x109CE]={
+ category="no",
+ direction="r",
+ },
+ [0x109CF]={
+ category="no",
+ direction="r",
+ },
+ [0x109D2]={
+ category="no",
+ direction="r",
+ },
+ [0x109D3]={
+ category="no",
+ direction="r",
+ },
+ [0x109D4]={
+ category="no",
+ direction="r",
+ },
+ [0x109D5]={
+ category="no",
+ direction="r",
+ },
+ [0x109D6]={
+ category="no",
+ direction="r",
+ },
+ [0x109D7]={
+ category="no",
+ direction="r",
+ },
+ [0x109D8]={
+ category="no",
+ direction="r",
+ },
+ [0x109D9]={
+ category="no",
+ direction="r",
+ },
+ [0x109DA]={
+ category="no",
+ direction="r",
+ },
+ [0x109DB]={
+ category="no",
+ direction="r",
+ },
+ [0x109DC]={
+ category="no",
+ direction="r",
+ },
+ [0x109DD]={
+ category="no",
+ direction="r",
+ },
+ [0x109DE]={
+ category="no",
+ direction="r",
+ },
+ [0x109DF]={
+ category="no",
+ direction="r",
+ },
+ [0x109E0]={
+ category="no",
+ direction="r",
+ },
+ [0x109E1]={
+ category="no",
+ direction="r",
+ },
+ [0x109E2]={
+ category="no",
+ direction="r",
+ },
+ [0x109E3]={
+ category="no",
+ direction="r",
+ },
+ [0x109E4]={
+ category="no",
+ direction="r",
+ },
+ [0x109E5]={
+ category="no",
+ direction="r",
+ },
+ [0x109E6]={
+ category="no",
+ direction="r",
+ },
+ [0x109E7]={
+ category="no",
+ direction="r",
+ },
+ [0x109E8]={
+ category="no",
+ direction="r",
+ },
+ [0x109E9]={
+ category="no",
+ direction="r",
+ },
+ [0x109EA]={
+ category="no",
+ direction="r",
+ },
+ [0x109EB]={
+ category="no",
+ direction="r",
+ },
+ [0x109EC]={
+ category="no",
+ direction="r",
+ },
+ [0x109ED]={
+ category="no",
+ direction="r",
+ },
+ [0x109EE]={
+ category="no",
+ direction="r",
+ },
+ [0x109EF]={
+ category="no",
+ direction="r",
+ },
+ [0x109F0]={
+ category="no",
+ direction="r",
+ },
+ [0x109F1]={
+ category="no",
+ direction="r",
+ },
+ [0x109F2]={
+ category="no",
+ direction="r",
+ },
+ [0x109F3]={
+ category="no",
+ direction="r",
+ },
+ [0x109F4]={
+ category="no",
+ direction="r",
+ },
+ [0x109F5]={
+ category="no",
+ direction="r",
+ },
+ [0x109F6]={
+ category="no",
+ direction="r",
+ },
+ [0x109F7]={
+ category="no",
+ direction="r",
+ },
+ [0x109F8]={
+ category="no",
+ direction="r",
+ },
+ [0x109F9]={
+ category="no",
+ direction="r",
+ },
+ [0x109FA]={
+ category="no",
+ direction="r",
+ },
+ [0x109FB]={
+ category="no",
+ direction="r",
+ },
+ [0x109FC]={
+ category="no",
+ direction="r",
+ },
+ [0x109FD]={
+ category="no",
+ direction="r",
+ },
+ [0x109FE]={
+ category="no",
+ direction="r",
+ },
+ [0x109FF]={
+ category="no",
+ direction="r",
+ },
[0x10A00]={
category="lo",
direction="r",
@@ -73759,6 +74583,438 @@ return {
category="lo",
direction="r",
},
+ [0x10C80]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C81]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C82]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C83]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C84]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C85]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C86]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C87]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C88]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C89]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C8A]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C8B]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C8C]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C8D]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C8E]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C8F]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C90]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C91]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C92]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C93]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C94]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C95]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C96]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C97]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C98]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C99]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C9A]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C9B]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C9C]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C9D]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C9E]={
+ category="lu",
+ direction="r",
+ },
+ [0x10C9F]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA0]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA1]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA2]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA3]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA4]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA5]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA6]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA7]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA8]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CA9]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CAA]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CAB]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CAC]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CAD]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CAE]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CAF]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CB0]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CB1]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CB2]={
+ category="lu",
+ direction="r",
+ },
+ [0x10CC0]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC1]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC2]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC3]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC4]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC5]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC6]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC7]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC8]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CC9]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CCA]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CCB]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CCC]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CCD]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CCE]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CCF]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD0]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD1]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD2]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD3]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD4]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD5]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD6]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD7]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD8]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CD9]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CDA]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CDB]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CDC]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CDD]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CDE]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CDF]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE0]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE1]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE2]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE3]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE4]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE5]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE6]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE7]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE8]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CE9]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CEA]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CEB]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CEC]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CED]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CEE]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CEF]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CF0]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CF1]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CF2]={
+ category="ll",
+ direction="r",
+ },
+ [0x10CFA]={
+ category="no",
+ direction="r",
+ },
+ [0x10CFB]={
+ category="no",
+ direction="r",
+ },
+ [0x10CFC]={
+ category="no",
+ direction="r",
+ },
+ [0x10CFD]={
+ category="no",
+ direction="r",
+ },
+ [0x10CFE]={
+ category="no",
+ direction="r",
+ },
+ [0x10CFF]={
+ category="no",
+ direction="r",
+ },
[0x10E60]={
category="no",
direction="an",
@@ -75439,6 +76695,22 @@ return {
category="po",
direction="l",
},
+ [0x111C9]={
+ category="po",
+ direction="l",
+ },
+ [0x111CA]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x111CB]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x111CC]={
+ category="mn",
+ direction="nsm",
+ },
[0x111CD]={
category="po",
direction="l",
@@ -75487,6 +76759,26 @@ return {
category="lo",
direction="l",
},
+ [0x111DB]={
+ category="po",
+ direction="l",
+ },
+ [0x111DC]={
+ category="lo",
+ direction="l",
+ },
+ [0x111DD]={
+ category="po",
+ direction="l",
+ },
+ [0x111DE]={
+ category="po",
+ direction="l",
+ },
+ [0x111DF]={
+ category="po",
+ direction="l",
+ },
[0x111E1]={
category="no",
direction="l",
@@ -75811,6 +77103,158 @@ return {
category="po",
direction="l",
},
+ [0x11280]={
+ category="lo",
+ direction="l",
+ },
+ [0x11281]={
+ category="lo",
+ direction="l",
+ },
+ [0x11282]={
+ category="lo",
+ direction="l",
+ },
+ [0x11283]={
+ category="lo",
+ direction="l",
+ },
+ [0x11284]={
+ category="lo",
+ direction="l",
+ },
+ [0x11285]={
+ category="lo",
+ direction="l",
+ },
+ [0x11286]={
+ category="lo",
+ direction="l",
+ },
+ [0x11288]={
+ category="lo",
+ direction="l",
+ },
+ [0x1128A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1128B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1128C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1128D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1128F]={
+ category="lo",
+ direction="l",
+ },
+ [0x11290]={
+ category="lo",
+ direction="l",
+ },
+ [0x11291]={
+ category="lo",
+ direction="l",
+ },
+ [0x11292]={
+ category="lo",
+ direction="l",
+ },
+ [0x11293]={
+ category="lo",
+ direction="l",
+ },
+ [0x11294]={
+ category="lo",
+ direction="l",
+ },
+ [0x11295]={
+ category="lo",
+ direction="l",
+ },
+ [0x11296]={
+ category="lo",
+ direction="l",
+ },
+ [0x11297]={
+ category="lo",
+ direction="l",
+ },
+ [0x11298]={
+ category="lo",
+ direction="l",
+ },
+ [0x11299]={
+ category="lo",
+ direction="l",
+ },
+ [0x1129A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1129B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1129C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1129D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1129F]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A0]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A1]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A2]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A3]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A4]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A5]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A6]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A7]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A8]={
+ category="lo",
+ direction="l",
+ },
+ [0x112A9]={
+ category="po",
+ direction="l",
+ },
[0x112B0]={
category="lo",
direction="l",
@@ -76087,6 +77531,10 @@ return {
category="nd",
direction="l",
},
+ [0x11300]={
+ category="mn",
+ direction="nsm",
+ },
[0x11301]={
category="mn",
direction="nsm",
@@ -76339,6 +77787,10 @@ return {
category="mc",
direction="l",
},
+ [0x11350]={
+ category="lo",
+ direction="l",
+ },
[0x11357]={
category="mc",
direction="l",
@@ -77035,6 +78487,86 @@ return {
category="po",
direction="l",
},
+ [0x115CA]={
+ category="po",
+ direction="l",
+ },
+ [0x115CB]={
+ category="po",
+ direction="l",
+ },
+ [0x115CC]={
+ category="po",
+ direction="l",
+ },
+ [0x115CD]={
+ category="po",
+ direction="l",
+ },
+ [0x115CE]={
+ category="po",
+ direction="l",
+ },
+ [0x115CF]={
+ category="po",
+ direction="l",
+ },
+ [0x115D0]={
+ category="po",
+ direction="l",
+ },
+ [0x115D1]={
+ category="po",
+ direction="l",
+ },
+ [0x115D2]={
+ category="po",
+ direction="l",
+ },
+ [0x115D3]={
+ category="po",
+ direction="l",
+ },
+ [0x115D4]={
+ category="po",
+ direction="l",
+ },
+ [0x115D5]={
+ category="po",
+ direction="l",
+ },
+ [0x115D6]={
+ category="po",
+ direction="l",
+ },
+ [0x115D7]={
+ category="po",
+ direction="l",
+ },
+ [0x115D8]={
+ category="lo",
+ direction="l",
+ },
+ [0x115D9]={
+ category="lo",
+ direction="l",
+ },
+ [0x115DA]={
+ category="lo",
+ direction="l",
+ },
+ [0x115DB]={
+ category="lo",
+ direction="l",
+ },
+ [0x115DC]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x115DD]={
+ category="mn",
+ direction="nsm",
+ },
[0x11600]={
category="lo",
direction="l",
@@ -77615,6 +79147,234 @@ return {
category="nd",
direction="l",
},
+ [0x11700]={
+ category="lo",
+ direction="l",
+ },
+ [0x11701]={
+ category="lo",
+ direction="l",
+ },
+ [0x11702]={
+ category="lo",
+ direction="l",
+ },
+ [0x11703]={
+ category="lo",
+ direction="l",
+ },
+ [0x11704]={
+ category="lo",
+ direction="l",
+ },
+ [0x11705]={
+ category="lo",
+ direction="l",
+ },
+ [0x11706]={
+ category="lo",
+ direction="l",
+ },
+ [0x11707]={
+ category="lo",
+ direction="l",
+ },
+ [0x11708]={
+ category="lo",
+ direction="l",
+ },
+ [0x11709]={
+ category="lo",
+ direction="l",
+ },
+ [0x1170A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1170B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1170C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1170D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1170E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1170F]={
+ category="lo",
+ direction="l",
+ },
+ [0x11710]={
+ category="lo",
+ direction="l",
+ },
+ [0x11711]={
+ category="lo",
+ direction="l",
+ },
+ [0x11712]={
+ category="lo",
+ direction="l",
+ },
+ [0x11713]={
+ category="lo",
+ direction="l",
+ },
+ [0x11714]={
+ category="lo",
+ direction="l",
+ },
+ [0x11715]={
+ category="lo",
+ direction="l",
+ },
+ [0x11716]={
+ category="lo",
+ direction="l",
+ },
+ [0x11717]={
+ category="lo",
+ direction="l",
+ },
+ [0x11718]={
+ category="lo",
+ direction="l",
+ },
+ [0x11719]={
+ category="lo",
+ direction="l",
+ },
+ [0x1171D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1171E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1171F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11720]={
+ category="mc",
+ direction="l",
+ },
+ [0x11721]={
+ category="mc",
+ direction="l",
+ },
+ [0x11722]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11723]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11724]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11725]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11726]={
+ category="mc",
+ direction="l",
+ },
+ [0x11727]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11728]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11729]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1172A]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1172B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x11730]={
+ category="nd",
+ direction="l",
+ },
+ [0x11731]={
+ category="nd",
+ direction="l",
+ },
+ [0x11732]={
+ category="nd",
+ direction="l",
+ },
+ [0x11733]={
+ category="nd",
+ direction="l",
+ },
+ [0x11734]={
+ category="nd",
+ direction="l",
+ },
+ [0x11735]={
+ category="nd",
+ direction="l",
+ },
+ [0x11736]={
+ category="nd",
+ direction="l",
+ },
+ [0x11737]={
+ category="nd",
+ direction="l",
+ },
+ [0x11738]={
+ category="nd",
+ direction="l",
+ },
+ [0x11739]={
+ category="nd",
+ direction="l",
+ },
+ [0x1173A]={
+ category="no",
+ direction="l",
+ },
+ [0x1173B]={
+ category="no",
+ direction="l",
+ },
+ [0x1173C]={
+ category="po",
+ direction="l",
+ },
+ [0x1173D]={
+ category="po",
+ direction="l",
+ },
+ [0x1173E]={
+ category="po",
+ direction="l",
+ },
+ [0x1173F]={
+ category="so",
+ direction="l",
+ },
[0x118A0]={
category="lu",
direction="l",
@@ -81863,6 +83623,10 @@ return {
category="lo",
direction="l",
},
+ [0x12399]={
+ category="lo",
+ direction="l",
+ },
[0x12400]={
category="nl",
direction="l",
@@ -82327,6 +84091,790 @@ return {
category="po",
direction="l",
},
+ [0x12480]={
+ category="lo",
+ direction="l",
+ },
+ [0x12481]={
+ category="lo",
+ direction="l",
+ },
+ [0x12482]={
+ category="lo",
+ direction="l",
+ },
+ [0x12483]={
+ category="lo",
+ direction="l",
+ },
+ [0x12484]={
+ category="lo",
+ direction="l",
+ },
+ [0x12485]={
+ category="lo",
+ direction="l",
+ },
+ [0x12486]={
+ category="lo",
+ direction="l",
+ },
+ [0x12487]={
+ category="lo",
+ direction="l",
+ },
+ [0x12488]={
+ category="lo",
+ direction="l",
+ },
+ [0x12489]={
+ category="lo",
+ direction="l",
+ },
+ [0x1248A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1248B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1248C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1248D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1248E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1248F]={
+ category="lo",
+ direction="l",
+ },
+ [0x12490]={
+ category="lo",
+ direction="l",
+ },
+ [0x12491]={
+ category="lo",
+ direction="l",
+ },
+ [0x12492]={
+ category="lo",
+ direction="l",
+ },
+ [0x12493]={
+ category="lo",
+ direction="l",
+ },
+ [0x12494]={
+ category="lo",
+ direction="l",
+ },
+ [0x12495]={
+ category="lo",
+ direction="l",
+ },
+ [0x12496]={
+ category="lo",
+ direction="l",
+ },
+ [0x12497]={
+ category="lo",
+ direction="l",
+ },
+ [0x12498]={
+ category="lo",
+ direction="l",
+ },
+ [0x12499]={
+ category="lo",
+ direction="l",
+ },
+ [0x1249A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1249B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1249C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1249D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1249E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1249F]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A0]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A1]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A2]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A3]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A4]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A5]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A6]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A7]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A8]={
+ category="lo",
+ direction="l",
+ },
+ [0x124A9]={
+ category="lo",
+ direction="l",
+ },
+ [0x124AA]={
+ category="lo",
+ direction="l",
+ },
+ [0x124AB]={
+ category="lo",
+ direction="l",
+ },
+ [0x124AC]={
+ category="lo",
+ direction="l",
+ },
+ [0x124AD]={
+ category="lo",
+ direction="l",
+ },
+ [0x124AE]={
+ category="lo",
+ direction="l",
+ },
+ [0x124AF]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B0]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B1]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B2]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B3]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B4]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B5]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B6]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B7]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B8]={
+ category="lo",
+ direction="l",
+ },
+ [0x124B9]={
+ category="lo",
+ direction="l",
+ },
+ [0x124BA]={
+ category="lo",
+ direction="l",
+ },
+ [0x124BB]={
+ category="lo",
+ direction="l",
+ },
+ [0x124BC]={
+ category="lo",
+ direction="l",
+ },
+ [0x124BD]={
+ category="lo",
+ direction="l",
+ },
+ [0x124BE]={
+ category="lo",
+ direction="l",
+ },
+ [0x124BF]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C0]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C1]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C2]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C3]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C4]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C5]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C6]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C7]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C8]={
+ category="lo",
+ direction="l",
+ },
+ [0x124C9]={
+ category="lo",
+ direction="l",
+ },
+ [0x124CA]={
+ category="lo",
+ direction="l",
+ },
+ [0x124CB]={
+ category="lo",
+ direction="l",
+ },
+ [0x124CC]={
+ category="lo",
+ direction="l",
+ },
+ [0x124CD]={
+ category="lo",
+ direction="l",
+ },
+ [0x124CE]={
+ category="lo",
+ direction="l",
+ },
+ [0x124CF]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D0]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D1]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D2]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D3]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D4]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D5]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D6]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D7]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D8]={
+ category="lo",
+ direction="l",
+ },
+ [0x124D9]={
+ category="lo",
+ direction="l",
+ },
+ [0x124DA]={
+ category="lo",
+ direction="l",
+ },
+ [0x124DB]={
+ category="lo",
+ direction="l",
+ },
+ [0x124DC]={
+ category="lo",
+ direction="l",
+ },
+ [0x124DD]={
+ category="lo",
+ direction="l",
+ },
+ [0x124DE]={
+ category="lo",
+ direction="l",
+ },
+ [0x124DF]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E0]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E1]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E2]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E3]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E4]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E5]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E6]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E7]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E8]={
+ category="lo",
+ direction="l",
+ },
+ [0x124E9]={
+ category="lo",
+ direction="l",
+ },
+ [0x124EA]={
+ category="lo",
+ direction="l",
+ },
+ [0x124EB]={
+ category="lo",
+ direction="l",
+ },
+ [0x124EC]={
+ category="lo",
+ direction="l",
+ },
+ [0x124ED]={
+ category="lo",
+ direction="l",
+ },
+ [0x124EE]={
+ category="lo",
+ direction="l",
+ },
+ [0x124EF]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F0]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F1]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F2]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F3]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F4]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F5]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F6]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F7]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F8]={
+ category="lo",
+ direction="l",
+ },
+ [0x124F9]={
+ category="lo",
+ direction="l",
+ },
+ [0x124FA]={
+ category="lo",
+ direction="l",
+ },
+ [0x124FB]={
+ category="lo",
+ direction="l",
+ },
+ [0x124FC]={
+ category="lo",
+ direction="l",
+ },
+ [0x124FD]={
+ category="lo",
+ direction="l",
+ },
+ [0x124FE]={
+ category="lo",
+ direction="l",
+ },
+ [0x124FF]={
+ category="lo",
+ direction="l",
+ },
+ [0x12500]={
+ category="lo",
+ direction="l",
+ },
+ [0x12501]={
+ category="lo",
+ direction="l",
+ },
+ [0x12502]={
+ category="lo",
+ direction="l",
+ },
+ [0x12503]={
+ category="lo",
+ direction="l",
+ },
+ [0x12504]={
+ category="lo",
+ direction="l",
+ },
+ [0x12505]={
+ category="lo",
+ direction="l",
+ },
+ [0x12506]={
+ category="lo",
+ direction="l",
+ },
+ [0x12507]={
+ category="lo",
+ direction="l",
+ },
+ [0x12508]={
+ category="lo",
+ direction="l",
+ },
+ [0x12509]={
+ category="lo",
+ direction="l",
+ },
+ [0x1250A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1250B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1250C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1250D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1250E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1250F]={
+ category="lo",
+ direction="l",
+ },
+ [0x12510]={
+ category="lo",
+ direction="l",
+ },
+ [0x12511]={
+ category="lo",
+ direction="l",
+ },
+ [0x12512]={
+ category="lo",
+ direction="l",
+ },
+ [0x12513]={
+ category="lo",
+ direction="l",
+ },
+ [0x12514]={
+ category="lo",
+ direction="l",
+ },
+ [0x12515]={
+ category="lo",
+ direction="l",
+ },
+ [0x12516]={
+ category="lo",
+ direction="l",
+ },
+ [0x12517]={
+ category="lo",
+ direction="l",
+ },
+ [0x12518]={
+ category="lo",
+ direction="l",
+ },
+ [0x12519]={
+ category="lo",
+ direction="l",
+ },
+ [0x1251A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1251B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1251C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1251D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1251E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1251F]={
+ category="lo",
+ direction="l",
+ },
+ [0x12520]={
+ category="lo",
+ direction="l",
+ },
+ [0x12521]={
+ category="lo",
+ direction="l",
+ },
+ [0x12522]={
+ category="lo",
+ direction="l",
+ },
+ [0x12523]={
+ category="lo",
+ direction="l",
+ },
+ [0x12524]={
+ category="lo",
+ direction="l",
+ },
+ [0x12525]={
+ category="lo",
+ direction="l",
+ },
+ [0x12526]={
+ category="lo",
+ direction="l",
+ },
+ [0x12527]={
+ category="lo",
+ direction="l",
+ },
+ [0x12528]={
+ category="lo",
+ direction="l",
+ },
+ [0x12529]={
+ category="lo",
+ direction="l",
+ },
+ [0x1252A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1252B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1252C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1252D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1252E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1252F]={
+ category="lo",
+ direction="l",
+ },
+ [0x12530]={
+ category="lo",
+ direction="l",
+ },
+ [0x12531]={
+ category="lo",
+ direction="l",
+ },
+ [0x12532]={
+ category="lo",
+ direction="l",
+ },
+ [0x12533]={
+ category="lo",
+ direction="l",
+ },
+ [0x12534]={
+ category="lo",
+ direction="l",
+ },
+ [0x12535]={
+ category="lo",
+ direction="l",
+ },
+ [0x12536]={
+ category="lo",
+ direction="l",
+ },
+ [0x12537]={
+ category="lo",
+ direction="l",
+ },
+ [0x12538]={
+ category="lo",
+ direction="l",
+ },
+ [0x12539]={
+ category="lo",
+ direction="l",
+ },
+ [0x1253A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1253B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1253C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1253D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1253E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1253F]={
+ category="lo",
+ direction="l",
+ },
+ [0x12540]={
+ category="lo",
+ direction="l",
+ },
+ [0x12541]={
+ category="lo",
+ direction="l",
+ },
+ [0x12542]={
+ category="lo",
+ direction="l",
+ },
+ [0x12543]={
+ category="lo",
+ direction="l",
+ },
[0x13000]={
category="lo",
direction="l",
@@ -86611,6 +89159,2338 @@ return {
category="lo",
direction="l",
},
+ [0x14400]={
+ category="lo",
+ direction="l",
+ },
+ [0x14401]={
+ category="lo",
+ direction="l",
+ },
+ [0x14402]={
+ category="lo",
+ direction="l",
+ },
+ [0x14403]={
+ category="lo",
+ direction="l",
+ },
+ [0x14404]={
+ category="lo",
+ direction="l",
+ },
+ [0x14405]={
+ category="lo",
+ direction="l",
+ },
+ [0x14406]={
+ category="lo",
+ direction="l",
+ },
+ [0x14407]={
+ category="lo",
+ direction="l",
+ },
+ [0x14408]={
+ category="lo",
+ direction="l",
+ },
+ [0x14409]={
+ category="lo",
+ direction="l",
+ },
+ [0x1440A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1440B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1440C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1440D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1440E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1440F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14410]={
+ category="lo",
+ direction="l",
+ },
+ [0x14411]={
+ category="lo",
+ direction="l",
+ },
+ [0x14412]={
+ category="lo",
+ direction="l",
+ },
+ [0x14413]={
+ category="lo",
+ direction="l",
+ },
+ [0x14414]={
+ category="lo",
+ direction="l",
+ },
+ [0x14415]={
+ category="lo",
+ direction="l",
+ },
+ [0x14416]={
+ category="lo",
+ direction="l",
+ },
+ [0x14417]={
+ category="lo",
+ direction="l",
+ },
+ [0x14418]={
+ category="lo",
+ direction="l",
+ },
+ [0x14419]={
+ category="lo",
+ direction="l",
+ },
+ [0x1441A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1441B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1441C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1441D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1441E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1441F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14420]={
+ category="lo",
+ direction="l",
+ },
+ [0x14421]={
+ category="lo",
+ direction="l",
+ },
+ [0x14422]={
+ category="lo",
+ direction="l",
+ },
+ [0x14423]={
+ category="lo",
+ direction="l",
+ },
+ [0x14424]={
+ category="lo",
+ direction="l",
+ },
+ [0x14425]={
+ category="lo",
+ direction="l",
+ },
+ [0x14426]={
+ category="lo",
+ direction="l",
+ },
+ [0x14427]={
+ category="lo",
+ direction="l",
+ },
+ [0x14428]={
+ category="lo",
+ direction="l",
+ },
+ [0x14429]={
+ category="lo",
+ direction="l",
+ },
+ [0x1442A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1442B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1442C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1442D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1442E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1442F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14430]={
+ category="lo",
+ direction="l",
+ },
+ [0x14431]={
+ category="lo",
+ direction="l",
+ },
+ [0x14432]={
+ category="lo",
+ direction="l",
+ },
+ [0x14433]={
+ category="lo",
+ direction="l",
+ },
+ [0x14434]={
+ category="lo",
+ direction="l",
+ },
+ [0x14435]={
+ category="lo",
+ direction="l",
+ },
+ [0x14436]={
+ category="lo",
+ direction="l",
+ },
+ [0x14437]={
+ category="lo",
+ direction="l",
+ },
+ [0x14438]={
+ category="lo",
+ direction="l",
+ },
+ [0x14439]={
+ category="lo",
+ direction="l",
+ },
+ [0x1443A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1443B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1443C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1443D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1443E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1443F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14440]={
+ category="lo",
+ direction="l",
+ },
+ [0x14441]={
+ category="lo",
+ direction="l",
+ },
+ [0x14442]={
+ category="lo",
+ direction="l",
+ },
+ [0x14443]={
+ category="lo",
+ direction="l",
+ },
+ [0x14444]={
+ category="lo",
+ direction="l",
+ },
+ [0x14445]={
+ category="lo",
+ direction="l",
+ },
+ [0x14446]={
+ category="lo",
+ direction="l",
+ },
+ [0x14447]={
+ category="lo",
+ direction="l",
+ },
+ [0x14448]={
+ category="lo",
+ direction="l",
+ },
+ [0x14449]={
+ category="lo",
+ direction="l",
+ },
+ [0x1444A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1444B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1444C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1444D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1444E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1444F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14450]={
+ category="lo",
+ direction="l",
+ },
+ [0x14451]={
+ category="lo",
+ direction="l",
+ },
+ [0x14452]={
+ category="lo",
+ direction="l",
+ },
+ [0x14453]={
+ category="lo",
+ direction="l",
+ },
+ [0x14454]={
+ category="lo",
+ direction="l",
+ },
+ [0x14455]={
+ category="lo",
+ direction="l",
+ },
+ [0x14456]={
+ category="lo",
+ direction="l",
+ },
+ [0x14457]={
+ category="lo",
+ direction="l",
+ },
+ [0x14458]={
+ category="lo",
+ direction="l",
+ },
+ [0x14459]={
+ category="lo",
+ direction="l",
+ },
+ [0x1445A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1445B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1445C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1445D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1445E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1445F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14460]={
+ category="lo",
+ direction="l",
+ },
+ [0x14461]={
+ category="lo",
+ direction="l",
+ },
+ [0x14462]={
+ category="lo",
+ direction="l",
+ },
+ [0x14463]={
+ category="lo",
+ direction="l",
+ },
+ [0x14464]={
+ category="lo",
+ direction="l",
+ },
+ [0x14465]={
+ category="lo",
+ direction="l",
+ },
+ [0x14466]={
+ category="lo",
+ direction="l",
+ },
+ [0x14467]={
+ category="lo",
+ direction="l",
+ },
+ [0x14468]={
+ category="lo",
+ direction="l",
+ },
+ [0x14469]={
+ category="lo",
+ direction="l",
+ },
+ [0x1446A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1446B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1446C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1446D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1446E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1446F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14470]={
+ category="lo",
+ direction="l",
+ },
+ [0x14471]={
+ category="lo",
+ direction="l",
+ },
+ [0x14472]={
+ category="lo",
+ direction="l",
+ },
+ [0x14473]={
+ category="lo",
+ direction="l",
+ },
+ [0x14474]={
+ category="lo",
+ direction="l",
+ },
+ [0x14475]={
+ category="lo",
+ direction="l",
+ },
+ [0x14476]={
+ category="lo",
+ direction="l",
+ },
+ [0x14477]={
+ category="lo",
+ direction="l",
+ },
+ [0x14478]={
+ category="lo",
+ direction="l",
+ },
+ [0x14479]={
+ category="lo",
+ direction="l",
+ },
+ [0x1447A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1447B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1447C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1447D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1447E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1447F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14480]={
+ category="lo",
+ direction="l",
+ },
+ [0x14481]={
+ category="lo",
+ direction="l",
+ },
+ [0x14482]={
+ category="lo",
+ direction="l",
+ },
+ [0x14483]={
+ category="lo",
+ direction="l",
+ },
+ [0x14484]={
+ category="lo",
+ direction="l",
+ },
+ [0x14485]={
+ category="lo",
+ direction="l",
+ },
+ [0x14486]={
+ category="lo",
+ direction="l",
+ },
+ [0x14487]={
+ category="lo",
+ direction="l",
+ },
+ [0x14488]={
+ category="lo",
+ direction="l",
+ },
+ [0x14489]={
+ category="lo",
+ direction="l",
+ },
+ [0x1448A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1448B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1448C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1448D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1448E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1448F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14490]={
+ category="lo",
+ direction="l",
+ },
+ [0x14491]={
+ category="lo",
+ direction="l",
+ },
+ [0x14492]={
+ category="lo",
+ direction="l",
+ },
+ [0x14493]={
+ category="lo",
+ direction="l",
+ },
+ [0x14494]={
+ category="lo",
+ direction="l",
+ },
+ [0x14495]={
+ category="lo",
+ direction="l",
+ },
+ [0x14496]={
+ category="lo",
+ direction="l",
+ },
+ [0x14497]={
+ category="lo",
+ direction="l",
+ },
+ [0x14498]={
+ category="lo",
+ direction="l",
+ },
+ [0x14499]={
+ category="lo",
+ direction="l",
+ },
+ [0x1449A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1449B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1449C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1449D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1449E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1449F]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A0]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A1]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A2]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A3]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A4]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A5]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A6]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A7]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A8]={
+ category="lo",
+ direction="l",
+ },
+ [0x144A9]={
+ category="lo",
+ direction="l",
+ },
+ [0x144AA]={
+ category="lo",
+ direction="l",
+ },
+ [0x144AB]={
+ category="lo",
+ direction="l",
+ },
+ [0x144AC]={
+ category="lo",
+ direction="l",
+ },
+ [0x144AD]={
+ category="lo",
+ direction="l",
+ },
+ [0x144AE]={
+ category="lo",
+ direction="l",
+ },
+ [0x144AF]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B0]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B1]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B2]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B3]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B4]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B5]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B6]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B7]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B8]={
+ category="lo",
+ direction="l",
+ },
+ [0x144B9]={
+ category="lo",
+ direction="l",
+ },
+ [0x144BA]={
+ category="lo",
+ direction="l",
+ },
+ [0x144BB]={
+ category="lo",
+ direction="l",
+ },
+ [0x144BC]={
+ category="lo",
+ direction="l",
+ },
+ [0x144BD]={
+ category="lo",
+ direction="l",
+ },
+ [0x144BE]={
+ category="lo",
+ direction="l",
+ },
+ [0x144BF]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C0]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C1]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C2]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C3]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C4]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C5]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C6]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C7]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C8]={
+ category="lo",
+ direction="l",
+ },
+ [0x144C9]={
+ category="lo",
+ direction="l",
+ },
+ [0x144CA]={
+ category="lo",
+ direction="l",
+ },
+ [0x144CB]={
+ category="lo",
+ direction="l",
+ },
+ [0x144CC]={
+ category="lo",
+ direction="l",
+ },
+ [0x144CD]={
+ category="lo",
+ direction="l",
+ },
+ [0x144CE]={
+ category="lo",
+ direction="l",
+ },
+ [0x144CF]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D0]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D1]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D2]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D3]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D4]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D5]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D6]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D7]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D8]={
+ category="lo",
+ direction="l",
+ },
+ [0x144D9]={
+ category="lo",
+ direction="l",
+ },
+ [0x144DA]={
+ category="lo",
+ direction="l",
+ },
+ [0x144DB]={
+ category="lo",
+ direction="l",
+ },
+ [0x144DC]={
+ category="lo",
+ direction="l",
+ },
+ [0x144DD]={
+ category="lo",
+ direction="l",
+ },
+ [0x144DE]={
+ category="lo",
+ direction="l",
+ },
+ [0x144DF]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E0]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E1]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E2]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E3]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E4]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E5]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E6]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E7]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E8]={
+ category="lo",
+ direction="l",
+ },
+ [0x144E9]={
+ category="lo",
+ direction="l",
+ },
+ [0x144EA]={
+ category="lo",
+ direction="l",
+ },
+ [0x144EB]={
+ category="lo",
+ direction="l",
+ },
+ [0x144EC]={
+ category="lo",
+ direction="l",
+ },
+ [0x144ED]={
+ category="lo",
+ direction="l",
+ },
+ [0x144EE]={
+ category="lo",
+ direction="l",
+ },
+ [0x144EF]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F0]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F1]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F2]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F3]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F4]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F5]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F6]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F7]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F8]={
+ category="lo",
+ direction="l",
+ },
+ [0x144F9]={
+ category="lo",
+ direction="l",
+ },
+ [0x144FA]={
+ category="lo",
+ direction="l",
+ },
+ [0x144FB]={
+ category="lo",
+ direction="l",
+ },
+ [0x144FC]={
+ category="lo",
+ direction="l",
+ },
+ [0x144FD]={
+ category="lo",
+ direction="l",
+ },
+ [0x144FE]={
+ category="lo",
+ direction="l",
+ },
+ [0x144FF]={
+ category="lo",
+ direction="l",
+ },
+ [0x14500]={
+ category="lo",
+ direction="l",
+ },
+ [0x14501]={
+ category="lo",
+ direction="l",
+ },
+ [0x14502]={
+ category="lo",
+ direction="l",
+ },
+ [0x14503]={
+ category="lo",
+ direction="l",
+ },
+ [0x14504]={
+ category="lo",
+ direction="l",
+ },
+ [0x14505]={
+ category="lo",
+ direction="l",
+ },
+ [0x14506]={
+ category="lo",
+ direction="l",
+ },
+ [0x14507]={
+ category="lo",
+ direction="l",
+ },
+ [0x14508]={
+ category="lo",
+ direction="l",
+ },
+ [0x14509]={
+ category="lo",
+ direction="l",
+ },
+ [0x1450A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1450B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1450C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1450D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1450E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1450F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14510]={
+ category="lo",
+ direction="l",
+ },
+ [0x14511]={
+ category="lo",
+ direction="l",
+ },
+ [0x14512]={
+ category="lo",
+ direction="l",
+ },
+ [0x14513]={
+ category="lo",
+ direction="l",
+ },
+ [0x14514]={
+ category="lo",
+ direction="l",
+ },
+ [0x14515]={
+ category="lo",
+ direction="l",
+ },
+ [0x14516]={
+ category="lo",
+ direction="l",
+ },
+ [0x14517]={
+ category="lo",
+ direction="l",
+ },
+ [0x14518]={
+ category="lo",
+ direction="l",
+ },
+ [0x14519]={
+ category="lo",
+ direction="l",
+ },
+ [0x1451A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1451B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1451C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1451D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1451E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1451F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14520]={
+ category="lo",
+ direction="l",
+ },
+ [0x14521]={
+ category="lo",
+ direction="l",
+ },
+ [0x14522]={
+ category="lo",
+ direction="l",
+ },
+ [0x14523]={
+ category="lo",
+ direction="l",
+ },
+ [0x14524]={
+ category="lo",
+ direction="l",
+ },
+ [0x14525]={
+ category="lo",
+ direction="l",
+ },
+ [0x14526]={
+ category="lo",
+ direction="l",
+ },
+ [0x14527]={
+ category="lo",
+ direction="l",
+ },
+ [0x14528]={
+ category="lo",
+ direction="l",
+ },
+ [0x14529]={
+ category="lo",
+ direction="l",
+ },
+ [0x1452A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1452B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1452C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1452D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1452E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1452F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14530]={
+ category="lo",
+ direction="l",
+ },
+ [0x14531]={
+ category="lo",
+ direction="l",
+ },
+ [0x14532]={
+ category="lo",
+ direction="l",
+ },
+ [0x14533]={
+ category="lo",
+ direction="l",
+ },
+ [0x14534]={
+ category="lo",
+ direction="l",
+ },
+ [0x14535]={
+ category="lo",
+ direction="l",
+ },
+ [0x14536]={
+ category="lo",
+ direction="l",
+ },
+ [0x14537]={
+ category="lo",
+ direction="l",
+ },
+ [0x14538]={
+ category="lo",
+ direction="l",
+ },
+ [0x14539]={
+ category="lo",
+ direction="l",
+ },
+ [0x1453A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1453B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1453C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1453D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1453E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1453F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14540]={
+ category="lo",
+ direction="l",
+ },
+ [0x14541]={
+ category="lo",
+ direction="l",
+ },
+ [0x14542]={
+ category="lo",
+ direction="l",
+ },
+ [0x14543]={
+ category="lo",
+ direction="l",
+ },
+ [0x14544]={
+ category="lo",
+ direction="l",
+ },
+ [0x14545]={
+ category="lo",
+ direction="l",
+ },
+ [0x14546]={
+ category="lo",
+ direction="l",
+ },
+ [0x14547]={
+ category="lo",
+ direction="l",
+ },
+ [0x14548]={
+ category="lo",
+ direction="l",
+ },
+ [0x14549]={
+ category="lo",
+ direction="l",
+ },
+ [0x1454A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1454B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1454C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1454D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1454E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1454F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14550]={
+ category="lo",
+ direction="l",
+ },
+ [0x14551]={
+ category="lo",
+ direction="l",
+ },
+ [0x14552]={
+ category="lo",
+ direction="l",
+ },
+ [0x14553]={
+ category="lo",
+ direction="l",
+ },
+ [0x14554]={
+ category="lo",
+ direction="l",
+ },
+ [0x14555]={
+ category="lo",
+ direction="l",
+ },
+ [0x14556]={
+ category="lo",
+ direction="l",
+ },
+ [0x14557]={
+ category="lo",
+ direction="l",
+ },
+ [0x14558]={
+ category="lo",
+ direction="l",
+ },
+ [0x14559]={
+ category="lo",
+ direction="l",
+ },
+ [0x1455A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1455B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1455C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1455D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1455E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1455F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14560]={
+ category="lo",
+ direction="l",
+ },
+ [0x14561]={
+ category="lo",
+ direction="l",
+ },
+ [0x14562]={
+ category="lo",
+ direction="l",
+ },
+ [0x14563]={
+ category="lo",
+ direction="l",
+ },
+ [0x14564]={
+ category="lo",
+ direction="l",
+ },
+ [0x14565]={
+ category="lo",
+ direction="l",
+ },
+ [0x14566]={
+ category="lo",
+ direction="l",
+ },
+ [0x14567]={
+ category="lo",
+ direction="l",
+ },
+ [0x14568]={
+ category="lo",
+ direction="l",
+ },
+ [0x14569]={
+ category="lo",
+ direction="l",
+ },
+ [0x1456A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1456B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1456C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1456D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1456E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1456F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14570]={
+ category="lo",
+ direction="l",
+ },
+ [0x14571]={
+ category="lo",
+ direction="l",
+ },
+ [0x14572]={
+ category="lo",
+ direction="l",
+ },
+ [0x14573]={
+ category="lo",
+ direction="l",
+ },
+ [0x14574]={
+ category="lo",
+ direction="l",
+ },
+ [0x14575]={
+ category="lo",
+ direction="l",
+ },
+ [0x14576]={
+ category="lo",
+ direction="l",
+ },
+ [0x14577]={
+ category="lo",
+ direction="l",
+ },
+ [0x14578]={
+ category="lo",
+ direction="l",
+ },
+ [0x14579]={
+ category="lo",
+ direction="l",
+ },
+ [0x1457A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1457B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1457C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1457D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1457E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1457F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14580]={
+ category="lo",
+ direction="l",
+ },
+ [0x14581]={
+ category="lo",
+ direction="l",
+ },
+ [0x14582]={
+ category="lo",
+ direction="l",
+ },
+ [0x14583]={
+ category="lo",
+ direction="l",
+ },
+ [0x14584]={
+ category="lo",
+ direction="l",
+ },
+ [0x14585]={
+ category="lo",
+ direction="l",
+ },
+ [0x14586]={
+ category="lo",
+ direction="l",
+ },
+ [0x14587]={
+ category="lo",
+ direction="l",
+ },
+ [0x14588]={
+ category="lo",
+ direction="l",
+ },
+ [0x14589]={
+ category="lo",
+ direction="l",
+ },
+ [0x1458A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1458B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1458C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1458D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1458E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1458F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14590]={
+ category="lo",
+ direction="l",
+ },
+ [0x14591]={
+ category="lo",
+ direction="l",
+ },
+ [0x14592]={
+ category="lo",
+ direction="l",
+ },
+ [0x14593]={
+ category="lo",
+ direction="l",
+ },
+ [0x14594]={
+ category="lo",
+ direction="l",
+ },
+ [0x14595]={
+ category="lo",
+ direction="l",
+ },
+ [0x14596]={
+ category="lo",
+ direction="l",
+ },
+ [0x14597]={
+ category="lo",
+ direction="l",
+ },
+ [0x14598]={
+ category="lo",
+ direction="l",
+ },
+ [0x14599]={
+ category="lo",
+ direction="l",
+ },
+ [0x1459A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1459B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1459C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1459D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1459E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1459F]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A0]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A1]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A2]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A3]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A4]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A5]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A6]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A7]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A8]={
+ category="lo",
+ direction="l",
+ },
+ [0x145A9]={
+ category="lo",
+ direction="l",
+ },
+ [0x145AA]={
+ category="lo",
+ direction="l",
+ },
+ [0x145AB]={
+ category="lo",
+ direction="l",
+ },
+ [0x145AC]={
+ category="lo",
+ direction="l",
+ },
+ [0x145AD]={
+ category="lo",
+ direction="l",
+ },
+ [0x145AE]={
+ category="lo",
+ direction="l",
+ },
+ [0x145AF]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B0]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B1]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B2]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B3]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B4]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B5]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B6]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B7]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B8]={
+ category="lo",
+ direction="l",
+ },
+ [0x145B9]={
+ category="lo",
+ direction="l",
+ },
+ [0x145BA]={
+ category="lo",
+ direction="l",
+ },
+ [0x145BB]={
+ category="lo",
+ direction="l",
+ },
+ [0x145BC]={
+ category="lo",
+ direction="l",
+ },
+ [0x145BD]={
+ category="lo",
+ direction="l",
+ },
+ [0x145BE]={
+ category="lo",
+ direction="l",
+ },
+ [0x145BF]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C0]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C1]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C2]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C3]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C4]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C5]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C6]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C7]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C8]={
+ category="lo",
+ direction="l",
+ },
+ [0x145C9]={
+ category="lo",
+ direction="l",
+ },
+ [0x145CA]={
+ category="lo",
+ direction="l",
+ },
+ [0x145CB]={
+ category="lo",
+ direction="l",
+ },
+ [0x145CC]={
+ category="lo",
+ direction="l",
+ },
+ [0x145CD]={
+ category="lo",
+ direction="l",
+ },
+ [0x145CE]={
+ category="lo",
+ direction="l",
+ },
+ [0x145CF]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D0]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D1]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D2]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D3]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D4]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D5]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D6]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D7]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D8]={
+ category="lo",
+ direction="l",
+ },
+ [0x145D9]={
+ category="lo",
+ direction="l",
+ },
+ [0x145DA]={
+ category="lo",
+ direction="l",
+ },
+ [0x145DB]={
+ category="lo",
+ direction="l",
+ },
+ [0x145DC]={
+ category="lo",
+ direction="l",
+ },
+ [0x145DD]={
+ category="lo",
+ direction="l",
+ },
+ [0x145DE]={
+ category="lo",
+ direction="l",
+ },
+ [0x145DF]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E0]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E1]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E2]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E3]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E4]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E5]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E6]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E7]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E8]={
+ category="lo",
+ direction="l",
+ },
+ [0x145E9]={
+ category="lo",
+ direction="l",
+ },
+ [0x145EA]={
+ category="lo",
+ direction="l",
+ },
+ [0x145EB]={
+ category="lo",
+ direction="l",
+ },
+ [0x145EC]={
+ category="lo",
+ direction="l",
+ },
+ [0x145ED]={
+ category="lo",
+ direction="l",
+ },
+ [0x145EE]={
+ category="lo",
+ direction="l",
+ },
+ [0x145EF]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F0]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F1]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F2]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F3]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F4]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F5]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F6]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F7]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F8]={
+ category="lo",
+ direction="l",
+ },
+ [0x145F9]={
+ category="lo",
+ direction="l",
+ },
+ [0x145FA]={
+ category="lo",
+ direction="l",
+ },
+ [0x145FB]={
+ category="lo",
+ direction="l",
+ },
+ [0x145FC]={
+ category="lo",
+ direction="l",
+ },
+ [0x145FD]={
+ category="lo",
+ direction="l",
+ },
+ [0x145FE]={
+ category="lo",
+ direction="l",
+ },
+ [0x145FF]={
+ category="lo",
+ direction="l",
+ },
+ [0x14600]={
+ category="lo",
+ direction="l",
+ },
+ [0x14601]={
+ category="lo",
+ direction="l",
+ },
+ [0x14602]={
+ category="lo",
+ direction="l",
+ },
+ [0x14603]={
+ category="lo",
+ direction="l",
+ },
+ [0x14604]={
+ category="lo",
+ direction="l",
+ },
+ [0x14605]={
+ category="lo",
+ direction="l",
+ },
+ [0x14606]={
+ category="lo",
+ direction="l",
+ },
+ [0x14607]={
+ category="lo",
+ direction="l",
+ },
+ [0x14608]={
+ category="lo",
+ direction="l",
+ },
+ [0x14609]={
+ category="lo",
+ direction="l",
+ },
+ [0x1460A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1460B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1460C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1460D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1460E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1460F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14610]={
+ category="lo",
+ direction="l",
+ },
+ [0x14611]={
+ category="lo",
+ direction="l",
+ },
+ [0x14612]={
+ category="lo",
+ direction="l",
+ },
+ [0x14613]={
+ category="lo",
+ direction="l",
+ },
+ [0x14614]={
+ category="lo",
+ direction="l",
+ },
+ [0x14615]={
+ category="lo",
+ direction="l",
+ },
+ [0x14616]={
+ category="lo",
+ direction="l",
+ },
+ [0x14617]={
+ category="lo",
+ direction="l",
+ },
+ [0x14618]={
+ category="lo",
+ direction="l",
+ },
+ [0x14619]={
+ category="lo",
+ direction="l",
+ },
+ [0x1461A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1461B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1461C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1461D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1461E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1461F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14620]={
+ category="lo",
+ direction="l",
+ },
+ [0x14621]={
+ category="lo",
+ direction="l",
+ },
+ [0x14622]={
+ category="lo",
+ direction="l",
+ },
+ [0x14623]={
+ category="lo",
+ direction="l",
+ },
+ [0x14624]={
+ category="lo",
+ direction="l",
+ },
+ [0x14625]={
+ category="lo",
+ direction="l",
+ },
+ [0x14626]={
+ category="lo",
+ direction="l",
+ },
+ [0x14627]={
+ category="lo",
+ direction="l",
+ },
+ [0x14628]={
+ category="lo",
+ direction="l",
+ },
+ [0x14629]={
+ category="lo",
+ direction="l",
+ },
+ [0x1462A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1462B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1462C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1462D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1462E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1462F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14630]={
+ category="lo",
+ direction="l",
+ },
+ [0x14631]={
+ category="lo",
+ direction="l",
+ },
+ [0x14632]={
+ category="lo",
+ direction="l",
+ },
+ [0x14633]={
+ category="lo",
+ direction="l",
+ },
+ [0x14634]={
+ category="lo",
+ direction="l",
+ },
+ [0x14635]={
+ category="lo",
+ direction="l",
+ },
+ [0x14636]={
+ category="lo",
+ direction="l",
+ },
+ [0x14637]={
+ category="lo",
+ direction="l",
+ },
+ [0x14638]={
+ category="lo",
+ direction="l",
+ },
+ [0x14639]={
+ category="lo",
+ direction="l",
+ },
+ [0x1463A]={
+ category="lo",
+ direction="l",
+ },
+ [0x1463B]={
+ category="lo",
+ direction="l",
+ },
+ [0x1463C]={
+ category="lo",
+ direction="l",
+ },
+ [0x1463D]={
+ category="lo",
+ direction="l",
+ },
+ [0x1463E]={
+ category="lo",
+ direction="l",
+ },
+ [0x1463F]={
+ category="lo",
+ direction="l",
+ },
+ [0x14640]={
+ category="lo",
+ direction="l",
+ },
+ [0x14641]={
+ category="lo",
+ direction="l",
+ },
+ [0x14642]={
+ category="lo",
+ direction="l",
+ },
+ [0x14643]={
+ category="lo",
+ direction="l",
+ },
+ [0x14644]={
+ category="lo",
+ direction="l",
+ },
+ [0x14645]={
+ category="lo",
+ direction="l",
+ },
+ [0x14646]={
+ category="lo",
+ direction="l",
+ },
[0x16800]={
category="lo",
direction="l",
@@ -92703,6 +97583,50 @@ return {
category="so",
direction="l",
},
+ [0x1D1DE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1DF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D1E8]={
+ category="so",
+ direction="l",
+ },
[0x1D200]={
category="so",
direction="on",
@@ -97387,6 +102311,2694 @@ return {
category="nd",
direction="en",
},
+ [0x1D800]={
+ category="so",
+ direction="l",
+ },
+ [0x1D801]={
+ category="so",
+ direction="l",
+ },
+ [0x1D802]={
+ category="so",
+ direction="l",
+ },
+ [0x1D803]={
+ category="so",
+ direction="l",
+ },
+ [0x1D804]={
+ category="so",
+ direction="l",
+ },
+ [0x1D805]={
+ category="so",
+ direction="l",
+ },
+ [0x1D806]={
+ category="so",
+ direction="l",
+ },
+ [0x1D807]={
+ category="so",
+ direction="l",
+ },
+ [0x1D808]={
+ category="so",
+ direction="l",
+ },
+ [0x1D809]={
+ category="so",
+ direction="l",
+ },
+ [0x1D80A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D80B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D80C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D80D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D80E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D80F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D810]={
+ category="so",
+ direction="l",
+ },
+ [0x1D811]={
+ category="so",
+ direction="l",
+ },
+ [0x1D812]={
+ category="so",
+ direction="l",
+ },
+ [0x1D813]={
+ category="so",
+ direction="l",
+ },
+ [0x1D814]={
+ category="so",
+ direction="l",
+ },
+ [0x1D815]={
+ category="so",
+ direction="l",
+ },
+ [0x1D816]={
+ category="so",
+ direction="l",
+ },
+ [0x1D817]={
+ category="so",
+ direction="l",
+ },
+ [0x1D818]={
+ category="so",
+ direction="l",
+ },
+ [0x1D819]={
+ category="so",
+ direction="l",
+ },
+ [0x1D81A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D81B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D81C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D81D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D81E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D81F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D820]={
+ category="so",
+ direction="l",
+ },
+ [0x1D821]={
+ category="so",
+ direction="l",
+ },
+ [0x1D822]={
+ category="so",
+ direction="l",
+ },
+ [0x1D823]={
+ category="so",
+ direction="l",
+ },
+ [0x1D824]={
+ category="so",
+ direction="l",
+ },
+ [0x1D825]={
+ category="so",
+ direction="l",
+ },
+ [0x1D826]={
+ category="so",
+ direction="l",
+ },
+ [0x1D827]={
+ category="so",
+ direction="l",
+ },
+ [0x1D828]={
+ category="so",
+ direction="l",
+ },
+ [0x1D829]={
+ category="so",
+ direction="l",
+ },
+ [0x1D82A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D82B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D82C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D82D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D82E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D82F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D830]={
+ category="so",
+ direction="l",
+ },
+ [0x1D831]={
+ category="so",
+ direction="l",
+ },
+ [0x1D832]={
+ category="so",
+ direction="l",
+ },
+ [0x1D833]={
+ category="so",
+ direction="l",
+ },
+ [0x1D834]={
+ category="so",
+ direction="l",
+ },
+ [0x1D835]={
+ category="so",
+ direction="l",
+ },
+ [0x1D836]={
+ category="so",
+ direction="l",
+ },
+ [0x1D837]={
+ category="so",
+ direction="l",
+ },
+ [0x1D838]={
+ category="so",
+ direction="l",
+ },
+ [0x1D839]={
+ category="so",
+ direction="l",
+ },
+ [0x1D83A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D83B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D83C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D83D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D83E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D83F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D840]={
+ category="so",
+ direction="l",
+ },
+ [0x1D841]={
+ category="so",
+ direction="l",
+ },
+ [0x1D842]={
+ category="so",
+ direction="l",
+ },
+ [0x1D843]={
+ category="so",
+ direction="l",
+ },
+ [0x1D844]={
+ category="so",
+ direction="l",
+ },
+ [0x1D845]={
+ category="so",
+ direction="l",
+ },
+ [0x1D846]={
+ category="so",
+ direction="l",
+ },
+ [0x1D847]={
+ category="so",
+ direction="l",
+ },
+ [0x1D848]={
+ category="so",
+ direction="l",
+ },
+ [0x1D849]={
+ category="so",
+ direction="l",
+ },
+ [0x1D84A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D84B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D84C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D84D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D84E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D84F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D850]={
+ category="so",
+ direction="l",
+ },
+ [0x1D851]={
+ category="so",
+ direction="l",
+ },
+ [0x1D852]={
+ category="so",
+ direction="l",
+ },
+ [0x1D853]={
+ category="so",
+ direction="l",
+ },
+ [0x1D854]={
+ category="so",
+ direction="l",
+ },
+ [0x1D855]={
+ category="so",
+ direction="l",
+ },
+ [0x1D856]={
+ category="so",
+ direction="l",
+ },
+ [0x1D857]={
+ category="so",
+ direction="l",
+ },
+ [0x1D858]={
+ category="so",
+ direction="l",
+ },
+ [0x1D859]={
+ category="so",
+ direction="l",
+ },
+ [0x1D85A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D85B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D85C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D85D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D85E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D85F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D860]={
+ category="so",
+ direction="l",
+ },
+ [0x1D861]={
+ category="so",
+ direction="l",
+ },
+ [0x1D862]={
+ category="so",
+ direction="l",
+ },
+ [0x1D863]={
+ category="so",
+ direction="l",
+ },
+ [0x1D864]={
+ category="so",
+ direction="l",
+ },
+ [0x1D865]={
+ category="so",
+ direction="l",
+ },
+ [0x1D866]={
+ category="so",
+ direction="l",
+ },
+ [0x1D867]={
+ category="so",
+ direction="l",
+ },
+ [0x1D868]={
+ category="so",
+ direction="l",
+ },
+ [0x1D869]={
+ category="so",
+ direction="l",
+ },
+ [0x1D86A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D86B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D86C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D86D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D86E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D86F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D870]={
+ category="so",
+ direction="l",
+ },
+ [0x1D871]={
+ category="so",
+ direction="l",
+ },
+ [0x1D872]={
+ category="so",
+ direction="l",
+ },
+ [0x1D873]={
+ category="so",
+ direction="l",
+ },
+ [0x1D874]={
+ category="so",
+ direction="l",
+ },
+ [0x1D875]={
+ category="so",
+ direction="l",
+ },
+ [0x1D876]={
+ category="so",
+ direction="l",
+ },
+ [0x1D877]={
+ category="so",
+ direction="l",
+ },
+ [0x1D878]={
+ category="so",
+ direction="l",
+ },
+ [0x1D879]={
+ category="so",
+ direction="l",
+ },
+ [0x1D87A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D87B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D87C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D87D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D87E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D87F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D880]={
+ category="so",
+ direction="l",
+ },
+ [0x1D881]={
+ category="so",
+ direction="l",
+ },
+ [0x1D882]={
+ category="so",
+ direction="l",
+ },
+ [0x1D883]={
+ category="so",
+ direction="l",
+ },
+ [0x1D884]={
+ category="so",
+ direction="l",
+ },
+ [0x1D885]={
+ category="so",
+ direction="l",
+ },
+ [0x1D886]={
+ category="so",
+ direction="l",
+ },
+ [0x1D887]={
+ category="so",
+ direction="l",
+ },
+ [0x1D888]={
+ category="so",
+ direction="l",
+ },
+ [0x1D889]={
+ category="so",
+ direction="l",
+ },
+ [0x1D88A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D88B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D88C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D88D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D88E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D88F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D890]={
+ category="so",
+ direction="l",
+ },
+ [0x1D891]={
+ category="so",
+ direction="l",
+ },
+ [0x1D892]={
+ category="so",
+ direction="l",
+ },
+ [0x1D893]={
+ category="so",
+ direction="l",
+ },
+ [0x1D894]={
+ category="so",
+ direction="l",
+ },
+ [0x1D895]={
+ category="so",
+ direction="l",
+ },
+ [0x1D896]={
+ category="so",
+ direction="l",
+ },
+ [0x1D897]={
+ category="so",
+ direction="l",
+ },
+ [0x1D898]={
+ category="so",
+ direction="l",
+ },
+ [0x1D899]={
+ category="so",
+ direction="l",
+ },
+ [0x1D89A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D89B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D89C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D89D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D89E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D89F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8A9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8AA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8AB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8AC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8AD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8AE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8AF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8B9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8BA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8BB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8BC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8BD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8BE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8BF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8C9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8CA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8CB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8CC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8CD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8CE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8CF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8D9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8DA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8DB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8DC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8DD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8DE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8DF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8E9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8EA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8EB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8EC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8ED]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8EE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8EF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8F9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8FA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8FB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8FC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8FD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8FE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D8FF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D900]={
+ category="so",
+ direction="l",
+ },
+ [0x1D901]={
+ category="so",
+ direction="l",
+ },
+ [0x1D902]={
+ category="so",
+ direction="l",
+ },
+ [0x1D903]={
+ category="so",
+ direction="l",
+ },
+ [0x1D904]={
+ category="so",
+ direction="l",
+ },
+ [0x1D905]={
+ category="so",
+ direction="l",
+ },
+ [0x1D906]={
+ category="so",
+ direction="l",
+ },
+ [0x1D907]={
+ category="so",
+ direction="l",
+ },
+ [0x1D908]={
+ category="so",
+ direction="l",
+ },
+ [0x1D909]={
+ category="so",
+ direction="l",
+ },
+ [0x1D90A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D90B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D90C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D90D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D90E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D90F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D910]={
+ category="so",
+ direction="l",
+ },
+ [0x1D911]={
+ category="so",
+ direction="l",
+ },
+ [0x1D912]={
+ category="so",
+ direction="l",
+ },
+ [0x1D913]={
+ category="so",
+ direction="l",
+ },
+ [0x1D914]={
+ category="so",
+ direction="l",
+ },
+ [0x1D915]={
+ category="so",
+ direction="l",
+ },
+ [0x1D916]={
+ category="so",
+ direction="l",
+ },
+ [0x1D917]={
+ category="so",
+ direction="l",
+ },
+ [0x1D918]={
+ category="so",
+ direction="l",
+ },
+ [0x1D919]={
+ category="so",
+ direction="l",
+ },
+ [0x1D91A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D91B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D91C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D91D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D91E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D91F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D920]={
+ category="so",
+ direction="l",
+ },
+ [0x1D921]={
+ category="so",
+ direction="l",
+ },
+ [0x1D922]={
+ category="so",
+ direction="l",
+ },
+ [0x1D923]={
+ category="so",
+ direction="l",
+ },
+ [0x1D924]={
+ category="so",
+ direction="l",
+ },
+ [0x1D925]={
+ category="so",
+ direction="l",
+ },
+ [0x1D926]={
+ category="so",
+ direction="l",
+ },
+ [0x1D927]={
+ category="so",
+ direction="l",
+ },
+ [0x1D928]={
+ category="so",
+ direction="l",
+ },
+ [0x1D929]={
+ category="so",
+ direction="l",
+ },
+ [0x1D92A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D92B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D92C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D92D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D92E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D92F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D930]={
+ category="so",
+ direction="l",
+ },
+ [0x1D931]={
+ category="so",
+ direction="l",
+ },
+ [0x1D932]={
+ category="so",
+ direction="l",
+ },
+ [0x1D933]={
+ category="so",
+ direction="l",
+ },
+ [0x1D934]={
+ category="so",
+ direction="l",
+ },
+ [0x1D935]={
+ category="so",
+ direction="l",
+ },
+ [0x1D936]={
+ category="so",
+ direction="l",
+ },
+ [0x1D937]={
+ category="so",
+ direction="l",
+ },
+ [0x1D938]={
+ category="so",
+ direction="l",
+ },
+ [0x1D939]={
+ category="so",
+ direction="l",
+ },
+ [0x1D93A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D93B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D93C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D93D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D93E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D93F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D940]={
+ category="so",
+ direction="l",
+ },
+ [0x1D941]={
+ category="so",
+ direction="l",
+ },
+ [0x1D942]={
+ category="so",
+ direction="l",
+ },
+ [0x1D943]={
+ category="so",
+ direction="l",
+ },
+ [0x1D944]={
+ category="so",
+ direction="l",
+ },
+ [0x1D945]={
+ category="so",
+ direction="l",
+ },
+ [0x1D946]={
+ category="so",
+ direction="l",
+ },
+ [0x1D947]={
+ category="so",
+ direction="l",
+ },
+ [0x1D948]={
+ category="so",
+ direction="l",
+ },
+ [0x1D949]={
+ category="so",
+ direction="l",
+ },
+ [0x1D94A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D94B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D94C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D94D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D94E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D94F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D950]={
+ category="so",
+ direction="l",
+ },
+ [0x1D951]={
+ category="so",
+ direction="l",
+ },
+ [0x1D952]={
+ category="so",
+ direction="l",
+ },
+ [0x1D953]={
+ category="so",
+ direction="l",
+ },
+ [0x1D954]={
+ category="so",
+ direction="l",
+ },
+ [0x1D955]={
+ category="so",
+ direction="l",
+ },
+ [0x1D956]={
+ category="so",
+ direction="l",
+ },
+ [0x1D957]={
+ category="so",
+ direction="l",
+ },
+ [0x1D958]={
+ category="so",
+ direction="l",
+ },
+ [0x1D959]={
+ category="so",
+ direction="l",
+ },
+ [0x1D95A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D95B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D95C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D95D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D95E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D95F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D960]={
+ category="so",
+ direction="l",
+ },
+ [0x1D961]={
+ category="so",
+ direction="l",
+ },
+ [0x1D962]={
+ category="so",
+ direction="l",
+ },
+ [0x1D963]={
+ category="so",
+ direction="l",
+ },
+ [0x1D964]={
+ category="so",
+ direction="l",
+ },
+ [0x1D965]={
+ category="so",
+ direction="l",
+ },
+ [0x1D966]={
+ category="so",
+ direction="l",
+ },
+ [0x1D967]={
+ category="so",
+ direction="l",
+ },
+ [0x1D968]={
+ category="so",
+ direction="l",
+ },
+ [0x1D969]={
+ category="so",
+ direction="l",
+ },
+ [0x1D96A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D96B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D96C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D96D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D96E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D96F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D970]={
+ category="so",
+ direction="l",
+ },
+ [0x1D971]={
+ category="so",
+ direction="l",
+ },
+ [0x1D972]={
+ category="so",
+ direction="l",
+ },
+ [0x1D973]={
+ category="so",
+ direction="l",
+ },
+ [0x1D974]={
+ category="so",
+ direction="l",
+ },
+ [0x1D975]={
+ category="so",
+ direction="l",
+ },
+ [0x1D976]={
+ category="so",
+ direction="l",
+ },
+ [0x1D977]={
+ category="so",
+ direction="l",
+ },
+ [0x1D978]={
+ category="so",
+ direction="l",
+ },
+ [0x1D979]={
+ category="so",
+ direction="l",
+ },
+ [0x1D97A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D97B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D97C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D97D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D97E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D97F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D980]={
+ category="so",
+ direction="l",
+ },
+ [0x1D981]={
+ category="so",
+ direction="l",
+ },
+ [0x1D982]={
+ category="so",
+ direction="l",
+ },
+ [0x1D983]={
+ category="so",
+ direction="l",
+ },
+ [0x1D984]={
+ category="so",
+ direction="l",
+ },
+ [0x1D985]={
+ category="so",
+ direction="l",
+ },
+ [0x1D986]={
+ category="so",
+ direction="l",
+ },
+ [0x1D987]={
+ category="so",
+ direction="l",
+ },
+ [0x1D988]={
+ category="so",
+ direction="l",
+ },
+ [0x1D989]={
+ category="so",
+ direction="l",
+ },
+ [0x1D98A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D98B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D98C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D98D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D98E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D98F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D990]={
+ category="so",
+ direction="l",
+ },
+ [0x1D991]={
+ category="so",
+ direction="l",
+ },
+ [0x1D992]={
+ category="so",
+ direction="l",
+ },
+ [0x1D993]={
+ category="so",
+ direction="l",
+ },
+ [0x1D994]={
+ category="so",
+ direction="l",
+ },
+ [0x1D995]={
+ category="so",
+ direction="l",
+ },
+ [0x1D996]={
+ category="so",
+ direction="l",
+ },
+ [0x1D997]={
+ category="so",
+ direction="l",
+ },
+ [0x1D998]={
+ category="so",
+ direction="l",
+ },
+ [0x1D999]={
+ category="so",
+ direction="l",
+ },
+ [0x1D99A]={
+ category="so",
+ direction="l",
+ },
+ [0x1D99B]={
+ category="so",
+ direction="l",
+ },
+ [0x1D99C]={
+ category="so",
+ direction="l",
+ },
+ [0x1D99D]={
+ category="so",
+ direction="l",
+ },
+ [0x1D99E]={
+ category="so",
+ direction="l",
+ },
+ [0x1D99F]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9A9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9AA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9AB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9AC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9AD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9AE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9AF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9B9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9BA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9BB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9BC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9BD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9BE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9BF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9C9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9CA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9CB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9CC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9CD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9CE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9CF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9D9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9DA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9DB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9DC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9DD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9DE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9DF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9E9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9EA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9EB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9EC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9ED]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9EE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9EF]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F0]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F1]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F2]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F3]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F4]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F5]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F6]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F7]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F8]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9F9]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9FA]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9FB]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9FC]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9FD]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9FE]={
+ category="so",
+ direction="l",
+ },
+ [0x1D9FF]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA00]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA01]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA02]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA03]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA04]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA05]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA06]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA07]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA08]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA09]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA0A]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA0B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA0C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA0D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA0E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA0F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA10]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA11]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA12]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA13]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA14]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA15]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA16]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA17]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA18]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA19]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA1A]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA1B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA1C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA1D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA1E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA1F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA20]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA21]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA22]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA23]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA24]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA25]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA26]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA27]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA28]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA29]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA2A]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA2B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA2C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA2D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA2E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA2F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA30]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA31]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA32]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA33]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA34]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA35]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA36]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA37]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA38]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA39]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA3A]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA3B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA3C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA3D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA3E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA3F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA40]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA41]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA42]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA43]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA44]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA45]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA46]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA47]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA48]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA49]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA4A]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA4B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA4C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA4D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA4E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA4F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA50]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA51]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA52]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA53]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA54]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA55]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA56]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA57]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA58]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA59]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA5A]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA5B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA5C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA5D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA5E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA5F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA60]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA61]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA62]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA63]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA64]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA65]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA66]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA67]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA68]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA69]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA6A]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA6B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA6C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA6D]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA6E]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA6F]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA70]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA71]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA72]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA73]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA74]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA75]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA76]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA77]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA78]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA79]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA7A]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA7B]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA7C]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA7D]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA7E]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA7F]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA80]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA81]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA82]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA83]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA84]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA85]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA86]={
+ category="so",
+ direction="l",
+ },
+ [0x1DA87]={
+ category="po",
+ direction="l",
+ },
+ [0x1DA88]={
+ category="po",
+ direction="l",
+ },
+ [0x1DA89]={
+ category="po",
+ direction="l",
+ },
+ [0x1DA8A]={
+ category="po",
+ direction="l",
+ },
+ [0x1DA8B]={
+ category="po",
+ direction="l",
+ },
+ [0x1DA9B]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA9C]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA9D]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA9E]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DA9F]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA1]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA2]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA3]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA4]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA5]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA6]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA7]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA8]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAA9]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAAA]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAAB]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAAC]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAAD]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAAE]={
+ category="mn",
+ direction="nsm",
+ },
+ [0x1DAAF]={
+ category="mn",
+ direction="nsm",
+ },
[0x1E800]={
category="lo",
direction="r",
@@ -100815,6 +108427,18 @@ return {
category="so",
direction="on",
},
+ [0x1F32D]={
+ category="so",
+ direction="on",
+ },
+ [0x1F32E]={
+ category="so",
+ direction="on",
+ },
+ [0x1F32F]={
+ category="so",
+ direction="on",
+ },
[0x1F330]={
category="so",
direction="on",
@@ -101127,6 +108751,14 @@ return {
category="so",
direction="on",
},
+ [0x1F37E]={
+ category="so",
+ direction="on",
+ },
+ [0x1F37F]={
+ category="so",
+ direction="on",
+ },
[0x1F380]={
category="so",
direction="on",
@@ -101443,6 +109075,26 @@ return {
category="so",
direction="on",
},
+ [0x1F3CF]={
+ category="so",
+ direction="on",
+ },
+ [0x1F3D0]={
+ category="so",
+ direction="on",
+ },
+ [0x1F3D1]={
+ category="so",
+ direction="on",
+ },
+ [0x1F3D2]={
+ category="so",
+ direction="on",
+ },
+ [0x1F3D3]={
+ category="so",
+ direction="on",
+ },
[0x1F3D4]={
category="so",
direction="on",
@@ -101587,6 +109239,38 @@ return {
category="so",
direction="on",
},
+ [0x1F3F8]={
+ category="so",
+ direction="on",
+ },
+ [0x1F3F9]={
+ category="so",
+ direction="on",
+ },
+ [0x1F3FA]={
+ category="so",
+ direction="on",
+ },
+ [0x1F3FB]={
+ category="sk",
+ direction="on",
+ },
+ [0x1F3FC]={
+ category="sk",
+ direction="on",
+ },
+ [0x1F3FD]={
+ category="sk",
+ direction="on",
+ },
+ [0x1F3FE]={
+ category="sk",
+ direction="on",
+ },
+ [0x1F3FF]={
+ category="sk",
+ direction="on",
+ },
[0x1F400]={
category="so",
direction="on",
@@ -102607,6 +110291,10 @@ return {
category="so",
direction="on",
},
+ [0x1F4FF]={
+ category="so",
+ direction="on",
+ },
[0x1F500]={
category="so",
direction="on",
@@ -102907,6 +110595,26 @@ return {
category="so",
direction="on",
},
+ [0x1F54B]={
+ category="so",
+ direction="on",
+ },
+ [0x1F54C]={
+ category="so",
+ direction="on",
+ },
+ [0x1F54D]={
+ category="so",
+ direction="on",
+ },
+ [0x1F54E]={
+ category="so",
+ direction="on",
+ },
+ [0x1F54F]={
+ category="so",
+ direction="on",
+ },
[0x1F550]={
category="so",
direction="on",
@@ -103871,6 +111579,14 @@ return {
category="so",
direction="on",
},
+ [0x1F643]={
+ category="so",
+ direction="on",
+ },
+ [0x1F644]={
+ category="so",
+ direction="on",
+ },
[0x1F645]={
category="so",
direction="on",
@@ -104427,6 +112143,10 @@ return {
category="so",
direction="on",
},
+ [0x1F6D0]={
+ category="so",
+ direction="on",
+ },
[0x1F6E0]={
category="so",
direction="on",
@@ -105891,6 +113611,66 @@ return {
category="so",
direction="on",
},
+ [0x1F910]={
+ category="so",
+ direction="on",
+ },
+ [0x1F911]={
+ category="so",
+ direction="on",
+ },
+ [0x1F912]={
+ category="so",
+ direction="on",
+ },
+ [0x1F913]={
+ category="so",
+ direction="on",
+ },
+ [0x1F914]={
+ category="so",
+ direction="on",
+ },
+ [0x1F915]={
+ category="so",
+ direction="on",
+ },
+ [0x1F916]={
+ category="so",
+ direction="on",
+ },
+ [0x1F917]={
+ category="so",
+ direction="on",
+ },
+ [0x1F918]={
+ category="so",
+ direction="on",
+ },
+ [0x1F980]={
+ category="so",
+ direction="on",
+ },
+ [0x1F981]={
+ category="so",
+ direction="on",
+ },
+ [0x1F982]={
+ category="so",
+ direction="on",
+ },
+ [0x1F983]={
+ category="so",
+ direction="on",
+ },
+ [0x1F984]={
+ category="so",
+ direction="on",
+ },
+ [0x1F9C0]={
+ category="so",
+ direction="on",
+ },
[0x2F800]={
category="lo",
direction="l",
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua
index 9be29746c4b..c75696a8469 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua
@@ -1,7 +1,7 @@
if not modules then modules = { } end modules ['luaotfload-colors'] = {
- version = "2.5",
+ version = "2.6",
comment = "companion to luaotfload-main.lua (font color)",
- author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang, Dohyun Kim, David Carlisle",
copyright = "Luaotfload Development Team",
license = "GNU GPL v2.0"
}
@@ -19,26 +19,26 @@ explanation: http://tug.org/pipermail/luatex/2013-May/004305.html
--doc]]--
-local log = luaotfload.log
-local logreport = log.report
-
-local newnode = node.new
-local nodetype = node.id
-local traverse_nodes = node.traverse
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local texset = tex.set
-local texget = tex.get
+local logreport = luaotfload and luaotfload.log.report or print
+
+local nodedirect = node.direct
+local newnode = nodedirect.new
+local insert_node_before = nodedirect.insert_before
+local insert_node_after = nodedirect.insert_after
+local todirect = nodedirect.todirect
+local tonode = nodedirect.tonode
+local setfield = nodedirect.setfield
+local getid = nodedirect.getid
+local getfont = nodedirect.getfont
+local getlist = nodedirect.getlist
+local getsubtype = nodedirect.getsubtype
+local getnext = nodedirect.getnext
+local nodetail = nodedirect.tail
+local getattribute = nodedirect.has_attribute
+local setattribute = nodedirect.set_attribute
local stringformat = string.format
-local stringgsub = string.gsub
-local stringfind = string.find
-local stringsub = string.sub
-
-local otffeatures = fonts.constructors.newfeatures("otf")
local identifiers = fonts.hashes.identifiers
-local registerotffeature = otffeatures.register
local add_color_callback --[[ this used to be a global‽ ]]
@@ -66,10 +66,11 @@ local lpegmatch = lpeg.match
local C, Cg, Ct, P, R, S = lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.R, lpeg.S
local digit16 = R("09", "af", "AF")
+local opaque = S("fF") * S("fF")
local octet = C(digit16 * digit16)
local p_rgb = octet * octet * octet
-local p_rgba = p_rgb * octet
+local p_rgba = p_rgb * (octet - opaque)
local valid_digits = C(p_rgba + p_rgb) -- matches eight or six hex digits
local p_Crgb = Cg(octet/hex_to_dec, "red") --- for captures
@@ -91,36 +92,6 @@ local sanitize_color_expression = function (digits)
return sanitized
end
---[[doc--
-``setcolor`` modifies tfmdata.properties.color in place
---doc]]--
-
---- fontobj -> string -> unit
----
---- (where “string” is a rgb value as three octet
---- hexadecimal, with an optional fourth transparency
---- value)
----
-local setcolor = function (tfmdata, value)
- local sanitized = sanitize_color_expression(value)
- local properties = tfmdata.properties
-
- if sanitized then
- properties.color = sanitized
- add_color_callback()
- end
-end
-
-registerotffeature {
- name = "color",
- description = "color",
- initializers = {
- base = setcolor,
- node = setcolor,
- }
-}
-
-
--- something is carried around in ``res``
--- for later use by color_handler() --- but what?
@@ -174,41 +145,51 @@ end
--- Luatex internal types
+local nodetype = node.id
local glyph_t = nodetype("glyph")
local hlist_t = nodetype("hlist")
local vlist_t = nodetype("vlist")
local whatsit_t = nodetype("whatsit")
-local page_insert_t = nodetype("page_insert")
-local sub_box_t = nodetype("sub_box")
-
---- node -> nil | -1 | color‽
-local lookup_next_color
-lookup_next_color = function (head) --- paragraph material
- for n in traverse_nodes(head) do
- local n_id = n.id
-
- if n_id == glyph_t then
- local n_font
- if identifiers[n_font]
- and identifiers[n_font].properties
- and identifiers[n_font].properties.color
- then
- return identifiers[n.font].properties.color
- else
- return -1
- end
-
- elseif n_id == vlist_t or n_id == hlist_t or n_id == sub_box_t then
- local r = lookup_next_color(n.list)
- if r then
- return r
- end
-
- elseif n_id == whatsit_t or n_id == page_insert_t then
- return -1
+local disc_t = nodetype("disc")
+local pdfliteral_t = node.subtype("pdf_literal")
+local colorstack_t = node.subtype("pdf_colorstack")
+local mlist_to_hlist = node.mlist_to_hlist
+
+local color_callback
+local color_attr = luatexbase.new_attribute("luaotfload_color_attribute")
+
+-- (node * node * string * bool * (bool | nil)) -> (node * node * (string | nil))
+local color_whatsit
+color_whatsit = function (head, curr, color, push, tail)
+ local pushdata = hex_to_rgba(color)
+ local colornode = newnode(whatsit_t, colorstack_t)
+ setfield(colornode, "stack", 0)
+ setfield(colornode, "command", push and 1 or 2) -- 1: push, 2: pop
+ setfield(colornode, "data", push and pushdata or nil)
+ if tail then
+ head, curr = insert_node_after (head, curr, colornode)
+ else
+ head = insert_node_before(head, curr, colornode)
+ end
+ if not push and color:len() > 6 then
+ local colornode = newnode(whatsit_t, pdfliteral_t)
+ setfield(colornode, "mode", 2)
+ setfield(colornode, "data", "/TransGs1 gs")
+ if tail then
+ head, curr = insert_node_after (head, curr, colornode)
+ else
+ head = insert_node_before(head, curr, colornode)
end
end
- return nil
+ color = push and color or nil
+ return head, curr, color
+end
+
+-- number -> string | nil
+local get_font_color = function (font_id)
+ local tfmdata = identifiers[font_id]
+ local font_color = tfmdata and tfmdata.properties and tfmdata.properties.color
+ return font_color
end
--[[doc--
@@ -217,103 +198,208 @@ always nil when the function is called, they temporarily take string
values during the node list traversal.
--doc]]--
-local cnt = 0
---- node -> string -> int -> (node * string)
+--- (node * bool * (string | nil)) -> (node * (string | nil))
local node_colorize
-node_colorize = function (head, current_color, next_color)
- for n in traverse_nodes(head) do
- local n_id = n.id
- local nextnode = n.next
-
- if n_id == hlist_t or n_id == vlist_t or n_id == sub_box_t then
- local next_color_in = lookup_next_color(nextnode) or next_color
- n.list, current_color = node_colorize(n.list, current_color, next_color_in)
+node_colorize = function (head, toplevel, current_color)
+ local n = head
+ while n do
+ local n_id = getid(n)
+
+ if n_id == hlist_t or n_id == vlist_t then
+ local n_list = getlist(n)
+ if getattribute(n_list, color_attr) then
+ if current_color then
+ head, n, current_color = color_whatsit(head, n, current_color, false)
+ end
+ else
+ n_list, current_color = node_colorize(n_list, false, current_color)
+ if current_color and getsubtype(n) == 1 then -- created by linebreak
+ n_list, _, current_color = color_whatsit(n_list, nodetail(n_list), current_color, false, true)
+ end
+ setfield(n, "head", n_list)
+ end
elseif n_id == glyph_t then
- cnt = cnt + 1
- local tfmdata = identifiers[n.font]
-
--- colorization is restricted to those fonts
--- that received the “color” property upon
--- loading (see ``setcolor()`` above)
- if tfmdata and tfmdata.properties and tfmdata.properties.color then
- local font_color = tfmdata.properties.color
--- luaotfload.info(
--- "n: %d; %s; %d %s, %s",
--- cnt, utf.char(n.char), n.font, "<TRUE>", font_color)
- if font_color ~= current_color then
- local pushcolor = hex_to_rgba(font_color)
- local push = newnode(whatsit_t, 8)
- push.mode = 1
- push.data = pushcolor
- head = insert_node_before(head, n, push)
- current_color = font_color
+ local font_color = get_font_color(getfont(n))
+ if font_color ~= current_color then
+ if current_color then
+ head, n, current_color = color_whatsit(head, n, current_color, false)
end
- local next_color_in = lookup_next_color (nextnode) or next_color
- if next_color_in ~= font_color then
- local _, popcolor = hex_to_rgba(font_color)
- local pop = newnode(whatsit_t, 8)
- pop.mode = 1
- pop.data = popcolor
- head = insert_node_after(head, n, pop)
- current_color = nil
+ if font_color then
+ head, n, current_color = color_whatsit(head, n, font_color, true)
end
+ end
--- else
--- luaotfload.info(
--- "n: %d; %s; %d %s",
--- cnt, utf.char(n.char), n.font, "<FALSE>")
+ if current_color and color_callback == "pre_linebreak_filter" then
+ local nn = getnext(n)
+ while nn and getid(nn) == glyph_t do
+ local font_color = get_font_color(getfont(nn))
+ if font_color == current_color then
+ n = nn
+ else
+ break
+ end
+ nn = getnext(nn)
+ end
+ if getid(nn) == disc_t then
+ head, n, current_color = color_whatsit(head, nn, current_color, false, true)
+ else
+ head, n, current_color = color_whatsit(head, n, current_color, false, true)
+ end
+ end
+
+ elseif n_id == whatsit_t then
+ if current_color then
+ head, n, current_color = color_whatsit(head, n, current_color, false)
end
+
end
+
+ n = getnext(n)
end
+
+ if toplevel and current_color then
+ head, _, current_color = color_whatsit(head, nodetail(head), current_color, false, true)
+ end
+
+ setattribute(head, color_attr, 1)
return head, current_color
end
+local getpageres = pdf.getpageresources or function() return pdf.pageresources end
+local setpageres = pdf.setpageresources or function(s) pdf.pageresources = s end
+local catat11 = luatexbase.registernumber("catcodetable@atletter")
+local gettoks, scantoks = tex.gettoks, tex.scantoks
+local pgf = { bye = "pgfutil@everybye", extgs = "\\pgf@sys@addpdfresource@extgs@plain" }
+
--- node -> node
local color_handler = function (head)
- local new_head = node_colorize(head, nil, nil)
+ head = todirect(head)
+ head = node_colorize(head, true)
+ head = tonode(head)
+
-- now append our page resources
if res then
res["1"] = true
- local tpr = texget("pdfpageresources")
+ if scantoks and pgf.bye and not pgf.loaded then
+ pgf.loaded = token.create(pgf.bye).cmdname == "assign_toks"
+ pgf.bye = pgf.loaded and pgf.bye
+ end
+ local tpr = pgf.loaded and gettoks(pgf.bye) or getpageres() or ""
+
local t = ""
for k in pairs(res) do
- local str = stringformat("/TransGs%s<</ca %s/CA %s>>", k, k, k)
- if not stringfind(tpr,str) then
+ local str = stringformat("/TransGs%s<</ca %s>>", k, k) -- don't touch stroking elements
+ if not tpr:find(str) then
t = t .. str
end
end
- print""
if t ~= "" then
- print(">>", tpr, "<<")
- if not stringfind(tpr,"/ExtGState<<.*>>") then
- tpr = tpr.."/ExtGState<<>>"
+ if pgf.loaded then
+ scantoks("global", pgf.bye, catat11, stringformat("%s{%s}%s", pgf.extgs, t, tpr))
+ else
+ local tpr, n = tpr:gsub("/ExtGState<<", "%1"..t)
+ if n == 0 then
+ tpr = stringformat("%s/ExtGState<<%s>>", tpr, t)
+ end
+ setpageres(tpr)
end
- tpr = stringgsub(tpr,"/ExtGState<<","%1"..t)
- texset("global", "pdfpageresources", tpr)
- print(">>", tpr, "<<")
end
res = nil -- reset res
end
- return new_head
+ return head
end
+local color_callback_name = "luaotfload.color_handler"
local color_callback_activated = 0
+local add_to_callback = luatexbase.add_to_callback
+
+--- unit -> bool
+local mlist_to_hlist_initial = function ()
+ local cdesc = luatexbase.callback_descriptions "mlist_to_hlist"
+ return cdesc and cdesc[1] == color_callback_name
+end
--- unit -> unit
add_color_callback = function ( )
- local color_callback = config.luaotfload.run.color_callback
+ color_callback = config.luaotfload.run.color_callback
if not color_callback then
- color_callback = "pre_linebreak_filter"
+ color_callback = "post_linebreak_filter"
end
if color_callback_activated == 0 then
- luatexbase.add_to_callback(color_callback,
- color_handler,
- "luaotfload.color_handler")
+ add_to_callback(color_callback,
+ color_handler,
+ color_callback_name)
+ add_to_callback("hpack_filter",
+ function (head, groupcode)
+ if groupcode == "hbox" or
+ groupcode == "adjusted_hbox" or
+ groupcode == "align_set" then
+ head = color_handler(head)
+ end
+ return head
+ end,
+ color_callback_name)
+ add_to_callback("mlist_to_hlist",
+ function (head, display_type, need_penalties)
+ if mlist_to_hlist_initial () then
+ head = mlist_to_hlist(head, display_type, need_penalties)
+ end
+ if display_type == "text" then
+ return head
+ end
+ return color_handler(head)
+ end,
+ color_callback_name)
color_callback_activated = 1
end
end
+--[[doc--
+``setcolor`` modifies tfmdata.properties.color in place
+--doc]]--
+
+--- fontobj -> string -> unit
+---
+--- (where “string” is a rgb value as three octet
+--- hexadecimal, with an optional fourth transparency
+--- value)
+---
+local setcolor = function (tfmdata, value)
+ local sanitized = sanitize_color_expression(value)
+ local properties = tfmdata.properties
+
+ if sanitized then
+ properties.color = sanitized
+ add_color_callback()
+ end
+end
+
+return {
+ init = function ()
+ logreport = luaotfload.log.report
+ if not fonts then
+ logreport ("log", 0, "color",
+ "OTF mechanisms missing -- did you forget to \z
+ load a font loader?")
+ return false
+ end
+ fonts.handlers.otf.features.register {
+ name = "color",
+ description = "color",
+ initializers = {
+ base = setcolor,
+ node = setcolor,
+ }
+ }
+ return true
+ end
+}
+
+
-- vim:tw=71:sw=4:ts=4:expandtab
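For illustration only (not part of the patch): the rewritten colorizer brackets runs of colored glyphs with a pair of pdf_colorstack whatsits built through the node.direct interface, as color_whatsit() above does. A minimal self-contained sketch of that push/pop pattern follows; the wrap_red helper and the literal PDF color string are invented for the example, whereas the real code derives the string from the font's color property via hex_to_rgba() and only brackets glyphs whose font carries that property.

-- Editor's sketch; wrap_red and "1 0 0 rg 1 0 0 RG" are made up here.
local D            = node.direct
local todirect     = D.todirect
local tonode       = D.tonode
local whatsit_t    = node.id "whatsit"
local colorstack_t = node.subtype "pdf_colorstack"

local function colorstack_whatsit (push, data)
  local n = D.new (whatsit_t, colorstack_t)
  D.setfield (n, "stack", 0)
  D.setfield (n, "command", push and 1 or 2) -- 1: push, 2: pop
  if push then D.setfield (n, "data", data) end
  return n
end

local function wrap_red (head) -- node list in, node list out
  head = todirect (head)
  head = D.insert_before (head, head, colorstack_whatsit (true, "1 0 0 rg 1 0 0 RG"))
  D.insert_after (head, D.tail (head), colorstack_whatsit (false))
  return tonode (head)
end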
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-configuration.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-configuration.lua
index f24f5951f21..5e2800db678 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-configuration.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-configuration.lua
@@ -2,38 +2,33 @@
-------------------------------------------------------------------------------
-- FILE: luaotfload-configuration.lua
-- DESCRIPTION: config file reader
--- REQUIREMENTS: Luaotfload 2.5 or above
--- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
--- VERSION: same as Luaotfload
--- MODIFIED: 2014-07-13 14:19:32+0200
+-- REQUIREMENTS: Luaotfload 2.6 or above
+-- AUTHOR: Philipp Gesang, <phg@phi-gamma.net>
+-- AUTHOR: Dohyun Kim <nomosnomos@gmail.com>
-------------------------------------------------------------------------------
--
if not modules then modules = { } end modules ["luaotfload-configuration"] = {
- version = "2.5",
+ version = "2.6",
comment = "part of Luaotfload",
- author = "Philipp Gesang",
+ author = "Philipp Gesang, Dohyun Kim",
copyright = "Luaotfload Development Team",
license = "GNU GPL v2.0"
}
-luaotfload = luaotfload or { }
-config = config or { }
-config.luaotfload = { }
-
local status_file = "luaotfload-status"
local luaotfloadstatus = require (status_file)
local string = string
-local stringsub = string.sub
-local stringexplode = string.explode
-local stringstrip = string.strip
local stringfind = string.find
+local stringformat = string.format
+local stringstrip = string.strip
+local stringsub = string.sub
-local table = table
local tableappend = table.append
-local tablecopy = table.copy
local tableconcat = table.concat
+local tablecopy = table.copy
+local table = table
local tabletohash = table.tohash
local math = math
@@ -42,8 +37,10 @@ local mathfloor = math.floor
local io = io
local ioloaddata = io.loaddata
local iopopen = io.popen
+local iowrite = io.write
local os = os
+local osdate = os.date
local osgetenv = os.getenv
local lpeg = require "lpeg"
@@ -51,9 +48,7 @@ local lpegmatch = lpeg.match
local commasplitter = lpeg.splitat ","
local equalssplitter = lpeg.splitat "="
-local kpse = kpse
local kpseexpand_path = kpse.expand_path
-local kpselookup = kpse.lookup
local lfs = lfs
local lfsisfile = lfs.isfile
@@ -63,16 +58,12 @@ local file = file
local filejoin = file.join
local filereplacesuffix = file.replacesuffix
+local logreport = print -- overloaded later
+local getwritablepath = caches.getwritablepath
-local parsers = luaotfload.parsers
-
-local log = luaotfload.log
-local logreport = log.report
-
-local config_parser = parsers.config
-local stripslashes = parsers.stripslashes
-local getwritablepath = caches.getwritablepath
+local config_parser -- set later during init
+local stripslashes -- set later during init
-------------------------------------------------------------------------------
--- SETTINGS
@@ -127,6 +118,69 @@ local feature_presets = {
},
}
+--[[doc--
+
+ We allow loading of arbitrary fontloaders. Nevertheless we maintain a
+ list of the “official” ones shipped with Luaotfload so we can emit a
+ different log message.
+
+--doc]]--
+
+local default_fontloader = function ()
+ return luaotfloadstatus and luaotfloadstatus.notes.loader or "reference"
+end
+
+local registered_loaders = {
+ default = default_fontloader (),
+ reference = "reference",
+ unpackaged = "unpackaged",
+ context = "context",
+ tl2014 = "tl2014",
+}
+
+local pick_fontloader = function (s)
+ local ldr = registered_loaders[s]
+ if ldr ~= nil and type (ldr) == "string" then
+ logreport ("log", 2, "conf", "Using predefined fontloader \"%s\".", ldr)
+ return ldr
+ end
+ local idx = stringfind (s, ":")
+ if idx and idx > 2 then
+ if stringsub (s, 1, idx - 1) == "context" then
+ local pth = stringsub (s, idx + 1)
+ pth = stringstrip (pth)
+ logreport ("log", 2, "conf", "Context base path specified at \"%s\".", pth)
+ if lfsisdir (pth) then
+ logreport ("log", 5, "conf", "Context base path exists at \"%s\".", pth)
+ return pth
+ end
+ pth = kpseexpand_path (pth)
+ if lfsisdir (pth) then
+ logreport ("log", 5, "conf", "Context base path exists at \"%s\".", pth)
+ return pth
+ end
+ logreport ("both", 0, "conf", "Context base path not found at \"%s\".", pth)
+ end
+ end
+ return nil
+end
+
+--[[doc--
+
+ The ``post_linebreak_filter`` has been made the default callback for
+ hooking the colorizer into. This helps with hyphens inserted during
+ linebreaking, which would otherwise remain unaffected by the coloring.
+
+ http://tex.stackexchange.com/q/238539/14066
+
+--doc]]--
+
+local permissible_color_callbacks = {
+ default = "post_linebreak_filter",
+ pre_linebreak_filter = "pre_linebreak_filter",
+ post_linebreak_filter = "post_linebreak_filter",
+ pre_output_filter = "pre_output_filter",
+}
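A standalone sketch of the whitelist-with-fallback idiom this table serves (editor's illustration; validate_color_callback is a made-up name, while the actual lookup happens in the color_callback transform of option_spec further down): unknown identifiers silently degrade to the default.

-- Editor's sketch, plain Lua.
local permissible_color_callbacks = {
  default               = "post_linebreak_filter",
  pre_linebreak_filter  = "pre_linebreak_filter",
  post_linebreak_filter = "post_linebreak_filter",
  pre_output_filter     = "pre_output_filter",
}

local function validate_color_callback (spec)
  return permissible_color_callbacks[spec]
      or permissible_color_callbacks.default
end

assert (validate_color_callback "pre_output_filter" == "pre_output_filter")
assert (validate_color_callback "no_such_filter"    == "post_linebreak_filter")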
-------------------------------------------------------------------------------
@@ -147,8 +201,8 @@ local default_config = {
resolver = "cached",
definer = "patch",
log_level = 0,
- color_callback = "pre_linebreak_filter",
- live = true,
+ color_callback = "post_linebreak_filter",
+ fontloader = default_fontloader (),
},
misc = {
bisect = false,
@@ -258,17 +312,20 @@ local set_name_resolver = function ()
--- replace the resolver from luatex-fonts
if config.luaotfload.db.resolver == "cached" then
logreport ("both", 2, "cache", "Caching of name: lookups active.")
- names.resolvespec = names.resolve_cached
+ names.resolvespec = fonts.names.lookup_font_name_cached
else
- names.resolvespec = names.resolve_name
+ names.resolvespec = fonts.names.lookup_font_name
end
end
return true
end
local set_loglevel = function ()
- log.set_loglevel (config.luaotfload.run.log_level)
- return true
+ if luaotfload then
+ luaotfload.log.set_loglevel (config.luaotfload.run.log_level)
+ return true
+ end
+ return false
end
local build_cache_paths = function ()
@@ -429,7 +486,29 @@ local option_spec = {
definer = {
in_t = string_t,
out_t = string_t,
- transform = function (d) return d == "generic" and d or "patch" end,
+ transform = function (d)
+ if d == "generic" or d == "patch"
+ or d == "info_generic" or d == "info_patch"
+ then
+ return d
+ end
+ return "patch"
+ end,
+ },
+ fontloader = {
+ in_t = string_t,
+ out_t = string_t,
+ transform = function (id)
+ local ldr = pick_fontloader (id)
+ if ldr ~= nil then
+ return ldr
+ end
+ logreport ("log", 0, "conf",
+ "Requested fontloader \"%s\" not defined, "
+ .. "use at your own risk.",
+ id)
+ return id
+ end,
},
log_level = {
in_t = number_t,
@@ -439,9 +518,20 @@ local option_spec = {
color_callback = {
in_t = string_t,
out_t = string_t,
- transform = function (cb)
+ transform = function (cb_spec)
--- These are the two that make sense.
- return cb == "pre_output_filter" and cb or "pre_linebreak_filter"
+ local cb = permissible_color_callbacks[cb_spec]
+ if cb then
+ logreport ("log", 3, "conf",
+ "Using callback \"%s\" for font colorization.",
+ cb)
+ return cb
+ end
+ logreport ("log", 0, "conf",
+ "Requested callback identifier \"%s\" invalid, "
+ .. "falling back to default.",
+ cb_spec)
+ return permissible_color_callbacks.default
end,
},
},
@@ -477,6 +567,115 @@ local option_spec = {
}
-------------------------------------------------------------------------------
+--- FORMATTERS
+-------------------------------------------------------------------------------
+
+local commented = function (str)
+ return ";" .. str
+end
+
+local underscore_replacer = lpeg.replacer ("_", "-", true)
+
+local dashed = function (var)
+ --- INI spec dictates that dashes are valid in variable names, not
+ --- underscores.
+ return underscore_replacer (var) or var
+end
+
+local indent = " "
+local format_string = function (var, val)
+ return stringformat (indent .. "%s = %s", var, val)
+end
+
+local format_integer = function (var, val)
+ return stringformat (indent .. "%s = %d", var, val)
+end
+
+local format_boolean = function (var, val)
+ return stringformat (indent .. "%s = %s", var, val == true and "true" or "false")
+end
+
+local format_keyval = function (var, val)
+ local list = { }
+ local keys = table.sortedkeys (val)
+ for i = 1, #keys do
+ local key = keys[i]
+ local subval = val[key]
+ if subval == true then
+ list[#list + 1] = stringformat ("%s", key)
+ else
+ list[#list + 1] = stringformat ("%s=%s", key, val[key])
+ end
+ end
+ if next (list) then
+ return stringformat (indent .. "%s = %s",
+ var,
+ tableconcat (list, ","))
+ end
+end
+
+local format_section = function (title)
+ return stringformat ("[%s]", dashed (title))
+end
+
+local conf_header = [==[
+;;-----------------------------------------------------------------------------
+;; Luaotfload Configuration
+;;-----------------------------------------------------------------------------
+;;
+;; This file was generated by luaotfload-tool
+;; on %s. Configuration variables
+;; are documented in the manual to luaotfload.conf(5).
+;;
+;;-----------------------------------------------------------------------------
+
+]==]
+
+local conf_footer = [==[
+
+;; vim:filetype=dosini:expandtab:shiftwidth=2
+]==]
+
+--- Each dumpable variable (the ones mentioned in the man page) receives a
+--- formatter that will be used in dumping the variable. Each value receives a
+--- “commented” flag that indicates whether or not the line should be printed
+--- as a comment.
+
+local formatters = {
+ db = {
+ compress = { false, format_boolean },
+ formats = { false, format_string },
+ max_fonts = { false, format_integer },
+ scan_local = { false, format_boolean },
+ skip_read = { false, format_boolean },
+ strip = { false, format_boolean },
+ update_live = { false, format_boolean },
+ },
+ default_features = {
+ __default = { true, format_keyval },
+ },
+ misc = {
+ bisect = { false, format_boolean },
+ statistics = { false, format_boolean },
+ termwidth = { true , format_integer },
+ version = { true , format_string },
+ },
+ paths = {
+ cache_dir = { false, format_string },
+ names_dir = { false, format_string },
+ index_file = { false, format_string },
+ lookups_file = { false, format_string },
+ },
+ run = {
+ color_callback = { false, format_string },
+ definer = { false, format_string },
+ fontloader = { false, format_string },
+ log_level = { false, format_integer },
+ resolver = { false, format_string },
+ },
+}
+
+-------------------------------------------------------------------------------
--- MAIN FUNCTIONALITY
-------------------------------------------------------------------------------
@@ -668,7 +867,7 @@ local read = function (extra)
return false
end
- local parsed = lpegmatch (parsers.config, raw)
+ local parsed = lpegmatch (config_parser, raw)
if not parsed then
logreport ("both", 2, "conf", "Error parsing configuration file %q.", readme)
return false
@@ -691,16 +890,70 @@ local apply_defaults = function ()
return reconfigure ()
end
+local dump = function ()
+ local sections = table.sortedkeys (config.luaotfload)
+ local confdata = { }
+ for i = 1, #sections do
+ local section = sections[i]
+ local vars = config.luaotfload[section]
+ local varnames = table.sortedkeys (vars)
+ local sformats = formatters[section]
+ if sformats then
+ confdata[#confdata + 1] = format_section (section)
+ for j = 1, #varnames do
+ local var = varnames[j]
+ local val = vars[var]
+ local comment, sformat = unpack (sformats[var] or { })
+ if not sformat then
+ comment, sformat = unpack (sformats.__default or { })
+ end
+
+ if sformat then
+ local dashedvar = dashed (var)
+ if comment then
+ confdata[#confdata + 1] = commented (sformat (dashedvar, val))
+ else
+ confdata[#confdata + 1] = sformat (dashedvar, val)
+ end
+ end
+
+ end
+ confdata[#confdata + 1] = ""
+ end
+ end
+ if next(confdata) then
+ iowrite (stringformat (conf_header,
+ osdate ("%Y-%m-%d %H:%M:%S", os.time ())))
+ iowrite (tableconcat (confdata, "\n"))
+ iowrite (conf_footer)
+ end
+end
+
-------------------------------------------------------------------------------
--- EXPORTS
-------------------------------------------------------------------------------
-luaotfload.default_config = default_config
-
-config.actions = {
- read = read,
- apply = apply,
- apply_defaults = apply_defaults,
- reconfigure = reconfigure,
+return {
+ init = function ()
+ config.luaotfload = { }
+ logreport = luaotfload.log.report
+ local parsers = luaotfload.parsers
+ config_parser = parsers.config
+ stripslashes = parsers.stripslashes
+
+ luaotfload.default_config = default_config
+ config.actions = {
+ read = read,
+ apply = apply,
+ apply_defaults = apply_defaults,
+ reconfigure = reconfigure,
+ dump = dump,
+ }
+ if not apply_defaults () then
+ logreport ("log", 0, "load",
+ "Configuration unsuccessful: error loading default settings.")
+ end
+ return true
+ end
}
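As a rough approximation of what the new run.fontloader option accepts (editor's sketch; resolve_loader_request is a made-up name and the logic is deliberately reduced, since pick_fontloader() above additionally logs, strips whitespace and retries via kpse path expansion): a registered loader name passes through unchanged, a "context:<path>" request is honoured only when the directory exists, and anything else yields nil so the caller can warn and keep the raw value.

-- Editor's sketch, simplified from pick_fontloader ().
local lfs = require "lfs"

local registered = {
  reference = true, unpackaged = true, context = true, tl2014 = true,
}

local function resolve_loader_request (s)
  if registered[s] then return s end
  local path = s:match "^context:(.+)$"
  if path and lfs.attributes (path, "mode") == "directory" then
    return path
  end
  return nil
end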
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua
index 099c256dde8..367723be841 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua
@@ -1,5 +1,5 @@
if not modules then modules = { } end modules ['luaotfload-database'] = {
- version = "2.5",
+ version = "2.6",
comment = "companion to luaotfload-main.lua",
author = "Khaled Hosny, Elie Roux, Philipp Gesang",
copyright = "Luaotfload Development Team",
@@ -39,13 +39,8 @@ if not modules then modules = { } end modules ['luaotfload-database'] = {
local lpeg = require "lpeg"
local P, Cc, lpegmatch = lpeg.P, lpeg.Cc, lpeg.match
-local parsers = luaotfload.parsers
-local read_fonts_conf = parsers.read_fonts_conf
-local stripslashes = parsers.stripslashes
-local splitcomma = parsers.splitcomma
-
local log = luaotfload.log
-local report = log.report
+local logreport = log and log.report or print -- overriden later on
local report_status = log.names_status
local report_status_start = log.names_status_start
local report_status_stop = log.names_status_stop
@@ -119,19 +114,10 @@ local tablefastcopy = table.fastcopy
local tabletofile = table.tofile
local tabletohash = table.tohash
local tableserialize = table.serialize
---- the font loader namespace is “fonts”, same as in Context
---- we need to put some fallbacks into place for when running
---- as a script
-fonts = fonts or { }
-fonts.names = fonts.names or { }
-fonts.definers = fonts.definers or { }
-
-local names = fonts.names
+local names = fonts and fonts.names or { }
+
local name_index = nil --> upvalue for names.data
local lookup_cache = nil --> for names.lookups
-names.version = 2.51
-names.data = nil --- contains the loaded database
-names.lookups = nil --- contains the lookup cache
--- string -> (string * string)
local make_luanames = function (path)
@@ -363,7 +349,7 @@ local initialize_namedata = function (formats, created)
status = { }, -- was: status; map abspath -> mapping
mappings = { }, -- TODO: check if still necessary after rewrite
names = { },
--- files = { }, -- created later
+ files = { }, -- created later
meta = {
created = created or now,
formats = formats,
@@ -454,22 +440,19 @@ end
--- define locals in scope
local access_font_index
local collect_families
-local font_file_lookup
local find_closest
local flush_lookup_cache
local generate_filedata
local get_font_filter
local group_modifiers
-local load_lookups
local load_names
+local lookup_font_name
local getmetadata
local order_design_sizes
local ot_fullinfo
local read_blacklist
local reload_db
-local resolve_cached
-local resolve_fullpath
-local resolve_name
+local lookup_fullpath
local save_lookups
local save_names
local set_font_filter
@@ -482,18 +465,18 @@ local fonts_reloaded = false
--- limit output when approximate font matching (luaotfload-tool -F)
local fuzzy_limit = 1 --- display closest only
---- bool? -> dbobj
-load_names = function (dry_run)
+--- bool? -> bool? -> dbobj option
+load_names = function (dry_run, no_rebuild)
local starttime = osgettimeofday ()
local foundname, data = load_lua_file (config.luaotfload.paths.index_path_lua)
if data then
- report ("log", 0, "db",
- "Font names database loaded from %s", foundname)
- report ("term", 3, "db",
- "Font names database loaded from %s", foundname)
- report ("info", 3, "db", "Loading took %0.f ms.",
- 1000 * (osgettimeofday () - starttime))
+ logreport ("log", 0, "db",
+ "Font names database loaded from %s", foundname)
+ logreport ("term", 3, "db",
+ "Font names database loaded from %s", foundname)
+ logreport ("info", 3, "db", "Loading took %0.f ms.",
+ 1000 * (osgettimeofday () - starttime))
local db_version, names_version
if data.meta then
@@ -506,27 +489,32 @@ load_names = function (dry_run)
end
names_version = names.version
if db_version ~= names_version then
- report ("both", 0, "db",
- [[Version mismatch; expected %4.3f, got %4.3f.]],
- names_version, db_version)
+ logreport ("both", 0, "db",
+ [[Version mismatch; expected %4.3f, got %4.3f.]],
+ names_version, db_version)
if not fonts_reloaded then
- report ("both", 0, "db", [[Force rebuild.]])
+ logreport ("both", 0, "db", [[Force rebuild.]])
data = update_names ({ }, true, false)
if not data then
- report ("both", 0, "db",
- "Database creation unsuccessful.")
+ logreport ("both", 0, "db",
+ "Database creation unsuccessful.")
end
end
end
else
- report ("both", 0, "db",
- [[Font names database not found, generating new one.]])
- report ("both", 0, "db",
- [[This can take several minutes; please be patient.]])
+ if no_rebuild == true then
+ logreport ("both", 2, "db",
+ [[Database does not exist, skipping rebuild though.]])
+ return false
+ end
+ logreport ("both", 0, "db",
+ [[Font names database not found, generating new one.]])
+ logreport ("both", 0, "db",
+ [[This can take several minutes; please be patient.]])
data = update_names (initialize_namedata (get_font_filter ()),
nil, dry_run)
if not data then
- report ("both", 0, "db", "Database creation unsuccessful.")
+ logreport ("both", 0, "db", "Database creation unsuccessful.")
end
end
return data
@@ -545,20 +533,24 @@ access_font_index = function ()
end
getmetadata = function ()
- if not name_index then name_index = load_names() end
- return tablefastcopy (name_index.meta)
+ if not name_index then
+ name_index = load_names (false, true)
+ if name_index then return tablefastcopy (name_index.meta) end
+ end
+ return false
end
--- unit -> unit
+local load_lookups
load_lookups = function ( )
local foundname, data = load_lua_file(config.luaotfload.paths.lookup_path_lua)
if data then
- report("log", 0, "cache", "Lookup cache loaded from %s.", foundname)
- report("term", 3, "cache",
- "Lookup cache loaded from %s.", foundname)
+ logreport ("log", 0, "cache", "Lookup cache loaded from %s.", foundname)
+ logreport ("term", 3, "cache",
+ "Lookup cache loaded from %s.", foundname)
else
- report("both", 1, "cache",
- "No lookup cache, creating empty.")
+ logreport ("both", 1, "cache",
+ "No lookup cache, creating empty.")
data = { }
end
lookup_cache = data
@@ -630,7 +622,7 @@ end
--[[doc--
- font_file_lookup -- The ``file:`` are ultimately delegated here.
+ lookup_font_file -- The ``file:`` are ultimately delegated here.
The lookups are kind of a blunt instrument since they try locating
the file using every conceivable method, which is quite
inefficient. Nevertheless, resolving files that way is rarely the
@@ -639,7 +631,8 @@ end
--doc]]--
--- string -> string * string * bool
-font_file_lookup = function (filename)
+local lookup_font_file
+lookup_font_file = function (filename)
local found = lookup_filename (filename)
if not found then
@@ -659,7 +652,7 @@ font_file_lookup = function (filename)
if not fonts_reloaded and config.luaotfload.db.update_live == true then
return reload_db (stringformat ("File not found: %s.", filename),
- font_file_lookup,
+ lookup_font_file,
filename)
end
return filename, nil, false
font management we have to check both the system path and the texmf.
--doc]]--
local verify_font_file = function (basename)
- local path = resolve_fullpath (basename)
+ local path = lookup_fullpath (basename)
if path and lfsisfile(path) then
return true
end
@@ -740,7 +733,7 @@ Idk what the “spec” resolver is for.
spec: name, sub resolved, sub, name, forced
[*] name: contains both the name resolver from luatex-fonts and
- resolve_name() below
+ lookup_font_name () below
From my reading of font-def.lua, what a resolver does is
basically rewrite the “name” field of the specification record
@@ -769,46 +762,48 @@ local hash_request = function (specification)
end
--- 'a -> 'a -> table -> (string * int|boolean * boolean)
-resolve_cached = function (specification)
+local lookup_font_name_cached
+lookup_font_name_cached = function (specification)
if not lookup_cache then load_lookups () end
local request = hash_request(specification)
- report("both", 4, "cache", "Looking for %q in cache ...",
- request)
+ logreport ("both", 4, "cache", "Looking for %q in cache ...",
+ request)
local found = lookup_cache [request]
--- case 1) cache positive ----------------------------------------
if found then --- replay fields from cache hit
- report("info", 4, "cache", "Found!")
+ logreport ("info", 4, "cache", "Found!")
local basename = found[1]
--- check the presence of the file in case it’s been removed
local success = verify_font_file (basename)
if success == true then
return basename, found[2], true
end
- report("both", 4, "cache", "Cached file not found; resolving again.")
+ logreport ("both", 4, "cache",
+ "Cached file not found; resolving again.")
else
- report("both", 4, "cache", "Not cached; resolving.")
+ logreport ("both", 4, "cache", "Not cached; resolving.")
end
--- case 2) cache negative ----------------------------------------
--- first we resolve normally ...
- local filename, subfont = resolve_name (specification)
+ local filename, subfont = lookup_font_name (specification)
if not filename then
return nil, nil
end
--- ... then we add the fields to the cache ... ...
local entry = { filename, subfont }
- report("both", 4, "cache", "New entry: %s.", request)
+ logreport ("both", 4, "cache", "New entry: %s.", request)
lookup_cache [request] = entry
--- obviously, the updated cache needs to be stored.
--- TODO this should trigger a save only once the
--- document is compiled (finish_pdffile callback?)
- report("both", 5, "cache", "Saving updated cache.")
+ logreport ("both", 5, "cache", "Saving updated cache.")
local success = save_lookups ()
if not success then --- sad, but not critical
- report("both", 0, "cache", "Error writing cache.")
+ logreport ("both", 0, "cache", "Error writing cache.")
end
return filename, subfont
end
@@ -927,13 +922,13 @@ end
--[[doc--
- resolve_familyname -- Query the families table for an entry
+ lookup_familyname -- Query the families table for an entry
matching the specification.
The parameters “name” and “style” are pre-sanitized.
--doc]]--
--- spec -> string -> string -> int -> string * int
-local resolve_familyname = function (specification, name, style, askedsize)
+local lookup_familyname = function (specification, name, style, askedsize)
local families = name_index.families
local mappings = name_index.mappings
local candidates = nil
@@ -965,12 +960,12 @@ local resolve_familyname = function (specification, name, style, askedsize)
if not success then
return nil, nil
end
- report ("info", 2, "db", "Match found: %s(%d).",
- resolved, subfont or 0)
+ logreport ("info", 2, "db", "Match found: %s(%d).",
+ resolved, subfont or 0)
return resolved, subfont
end
-local resolve_fontname = function (specification, name, style)
+local lookup_fontname = function (specification, name, style)
local mappings = name_index.mappings
local fallback = nil
local lastresort = nil
@@ -1013,7 +1008,7 @@ end
--[[doc--
- resolve_name -- Perform a name: lookup. This first queries the
+ lookup_font_name -- Perform a name: lookup. This first queries the
font families table and, if there is no match for the spec, the
font names table.
The return value is a pair consisting of the file name and the
@@ -1051,7 +1046,7 @@ end
multiple design sizes to a given font/style combination, we put a
workaround in place that chooses that unmarked version.
- The first return value of “resolve_name” is the file name of the
+ The first return value of “lookup_font_name” is the file name of the
requested font (string). It can be passed to the fullname resolver
get_font_file().
The second value is either “false” or an integer indicating the
@@ -1060,7 +1055,7 @@ end
--doc]]--
--- table -> string * (int | bool)
-resolve_name = function (specification)
+lookup_font_name = function (specification)
local resolved, subfont
if not name_index then name_index = load_names () end
local name = sanitize_fontname (specification.name)
@@ -1078,28 +1073,28 @@ resolve_name = function (specification)
end
end
- resolved, subfont = resolve_familyname (specification,
- name,
- style,
- askedsize)
+ resolved, subfont = lookup_familyname (specification,
+ name,
+ style,
+ askedsize)
if not resolved then
- resolved, subfont = resolve_fontname (specification,
- name,
- style)
+ resolved, subfont = lookup_fontname (specification,
+ name,
+ style)
end
if not resolved then
if not fonts_reloaded and config.luaotfload.db.update_live == true then
return reload_db (stringformat ("Font %s not found.",
specification.name or "<?>"),
- resolve_name,
+ lookup_font_name,
specification)
end
end
return resolved, subfont
end
-resolve_fullpath = function (fontname, ext) --- getfilename()
+lookup_fullpath = function (fontname, ext) --- getfilename()
if not name_index then name_index = load_names () end
local files = name_index.files
local basedata = files.base
@@ -1132,9 +1127,9 @@ reload_db = function (why, caller, ...)
local namedata = name_index
local formats = tableconcat (namedata.meta.formats, ",")
- report ("both", 0, "db",
- "Reload initiated (formats: %s); reason: %q.",
- formats, why)
+ logreport ("both", 0, "db",
+ "Reload initiated (formats: %s); reason: %q.",
+ formats, why)
set_font_filter (formats)
namedata = update_names (namedata, false, false)
@@ -1145,7 +1140,7 @@ reload_db = function (why, caller, ...)
return caller (...)
end
- report ("both", 0, "db", "Database update unsuccessful.")
+ logreport ("both", 0, "db", "Database update unsuccessful.")
end
--- string -> string -> int
@@ -1176,6 +1171,25 @@ local iterative_levenshtein = function (s1, s2)
return costs[len2]--- lower right has the distance
end
+--- string list -> string list
+local delete_dupes = function (lst)
+ local n0 = #lst
+ if n0 == 0 then return lst end
+ tablesort (lst)
+ local ret = { }
+ local last
+ for i = 1, n0 do
+ local cur = lst[i]
+ if cur ~= last then
+ last = cur
+ ret[#ret + 1] = cur
+ end
+ end
+ logreport (false, 8, "query",
+ "Removed %d duplicate names.", n0 - #ret)
+ return ret
+end
+
--- string -> int -> bool
find_closest = function (name, limit)
local name = sanitize_fontname (name)
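The delete_dupes() helper added above is the classic sort-then-skip-equal-neighbours pass; a standalone sketch (editor's illustration, plain Lua, with a hypothetical dedup name):

-- Editor's sketch of the pattern used by delete_dupes ().
local function dedup (lst)
  table.sort (lst)
  local ret, last = { }, nil
  for i = 1, #lst do
    local cur = lst[i]
    if cur ~= last then
      last = cur
      ret[#ret + 1] = cur
    end
  end
  return ret
end

local r = dedup { "Iwona", "iwona", "Iwona", "IwonaMedium" }
assert (#r == 3) -- "Iwona", "IwonaMedium", "iwona"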
@@ -1221,6 +1235,7 @@ find_closest = function (name, limit)
else --- append
namelst[#namelst+1] = fullname
end
+
by_distance[dist] = namelst
end
@@ -1229,16 +1244,16 @@ find_closest = function (name, limit)
if n_distances > 0 then --- got some data
tablesort(distances)
limit = mathmin(n_distances, limit)
- report(false, 1, "query",
- "Displaying %d distance levels.", limit)
+ logreport (false, 1, "query",
+ "Displaying %d distance levels.", limit)
for i = 1, limit do
local dist = distances[i]
- local namelst = by_distance[dist]
- report(false, 0, "query",
- "Distance from \"%s\": %s\n "
- .. tableconcat (namelst, "\n "),
- name, dist)
+ local namelst = delete_dupes (by_distance[dist])
+ logreport (false, 0, "query",
+ "Distance from \"%s\": %s\n "
+ .. tableconcat (namelst, "\n "),
+ name, dist)
end
return true
@@ -1265,7 +1280,7 @@ local load_font_file = function (filename, subfont)
local rawfont, _msg = fontloaderopen (filename, subfont)
--local rawfont, _msg = fontloaderinfo (filename, subfont)
if not rawfont then
- report ("log", 1, "db", "ERROR: failed to open %s.", filename)
+ logreport ("log", 1, "db", "ERROR: failed to open %s.", filename)
return
end
return rawfont
@@ -1308,8 +1323,8 @@ local get_english_names = function (metadata)
end
-- no (English) names table, probably a broken font
- report("both", 3, "db",
- "%s: missing or broken English names table.", basename)
+ logreport ("both", 3, "db",
+ "%s: missing or broken English names table.", basename)
return { fontname = metadata.fontname,
fullname = metadata.fullname, }
end
@@ -1335,9 +1350,9 @@ local get_raw_info = function (metadata, basename)
--- Broken names table, e.g. avkv.ttf with UTF-16 strings;
--- we put some dummies in place like the fontloader
--- (font-otf.lua) does.
- report("both", 3, "db",
- "%s has invalid postscript font names, using dummies.",
- basename)
+ logreport ("both", 3, "db",
+ "%s has invalid postscript font names, using dummies.",
+ basename)
fontname = "bad-fontname-" .. basename
fullname = "bad-fullname-" .. basename
end
@@ -1611,7 +1626,7 @@ local compare_timestamps = function (fullname,
if targetentrystatus ~= nil
and targetentrystatus.timestamp == targettimestamp then
- report ("log", 3, "db", "Font %q already read.", fullname)
+ logreport ("log", 3, "db", "Font %q already read.", fullname)
return false
end
@@ -1633,7 +1648,7 @@ local compare_timestamps = function (fullname,
targetentrystatus.index [targetindex + 1] = location
end
- report ("log", 3, "db", "Font %q already indexed.", fullname)
+ logreport ("log", 3, "db", "Font %q already indexed.", fullname)
return false
end
@@ -1713,8 +1728,8 @@ local read_font_names = function (fullname,
--- 1) skip if blacklisted
if names.blacklist[fullname] or names.blacklist[basename] then
- report("log", 2, "db",
- "Ignoring blacklisted font %q.", fullname)
+ logreport ("log", 2, "db",
+ "Ignoring blacklisted font %q.", fullname)
return false
end
@@ -1737,8 +1752,8 @@ local read_font_names = function (fullname,
local loader = loaders [format] --- ot_fullinfo, t1_fullinfo
if not loader then
- report ("both", 0, "db",
- "Unknown format: %q, skipping.", format)
+ logreport ("both", 0, "db",
+ "Unknown format: %q, skipping.", format)
return false
end
@@ -1747,8 +1762,8 @@ local read_font_names = function (fullname,
local info = fontloaderinfo (fullname)
if not info then
- report ("log", 1, "db",
- "Failed to read basic information from %q", basename)
+ logreport ("log", 1, "db",
+ "Failed to read basic information from %q", basename)
return false
end
@@ -1820,11 +1835,7 @@ do
end
end
-fonts.path_normalize = path_normalize
-
-names.blacklist = { }
-
-local blacklist = names.blacklist
+local blacklist = { }
local p_blacklist --- prefixes of dirs
--- string list -> string list
@@ -1853,8 +1864,8 @@ local create_blacklist = function (blacklist, whitelist)
local result = { }
local dirs = { }
- report("info", 2, "db", "Blacklisting %d files and directories.",
- #blacklist)
+ logreport ("info", 2, "db", "Blacklisting %d files and directories.",
+ #blacklist)
for i=1, #blacklist do
local entry = blacklist[i]
if lfsisdir(entry) then
@@ -1864,7 +1875,7 @@ local create_blacklist = function (blacklist, whitelist)
end
end
- report("info", 2, "db", "Whitelisting %d files.", #whitelist)
+ logreport ("info", 2, "db", "Whitelisting %d files.", #whitelist)
for i=1, #whitelist do
result[whitelist[i]] = nil
end
@@ -1906,9 +1917,9 @@ read_blacklist = function ()
if first_chr == "%" or stringis_empty(line) then
-- comment or empty line
elseif first_chr == "-" then
- report ("both", 3, "db",
- "Whitelisted file %q via %q.",
- line, path)
+ logreport ("both", 3, "db",
+ "Whitelisted file %q via %q.",
+ line, path)
whitelist[#whitelist+1] = stringsub(line, 2, -1)
else
local cmt = stringfind(line, "%%")
@@ -1916,9 +1927,9 @@ read_blacklist = function ()
line = stringsub(line, 1, cmt - 1)
end
line = stringstrip(line)
- report ("both", 3, "db",
- "Blacklisted file %q via %q.",
- line, path)
+ logreport ("both", 3, "db",
+ "Blacklisted file %q via %q.",
+ line, path)
blacklist[#blacklist+1] = line
end
end
@@ -1930,9 +1941,8 @@ end
local p_font_filter
do
- local current_formats = { }
-
local extension_pattern = function (list)
+ if type (list) ~= "table" or #list == 0 then return P(-1) end
local pat
for i=#list, 1, -1 do
local e = list[i]
@@ -1949,12 +1959,17 @@ do
--- small helper to adjust the font filter pattern (--formats
--- option)
+ local current_formats = { }
+
set_font_filter = function (formats)
if not formats or type (formats) ~= "string" then
return
end
+ if splitcomma == nil then
+ splitcomma = luaotfload.parsers and luaotfload.parsers.splitcomma
+ end
if stringsub (formats, 1, 1) == "+" then -- add
formats = lpegmatch (splitcomma, stringsub (formats, 2))
if formats then
@@ -2115,24 +2130,24 @@ end
--- string -> string -> string * string list
local collect_font_filenames_dir = function (dirname, location)
if lpegmatch (p_blacklist, dirname) then
- report ("both", 4, "db",
- "Skipping blacklisted directory %s.", dirname)
+ logreport ("both", 4, "db",
+ "Skipping blacklisted directory %s.", dirname)
--- ignore
return { }
end
local found = find_font_files (dirname, location ~= "texmf" and location ~= "local")
if not found then
- report ("both", 4, "db",
- "No such directory: %q; skipping.", dirname)
+ logreport ("both", 4, "db",
+ "No such directory: %q; skipping.", dirname)
return { }
end
local nfound = #found
local files = { }
- report ("both", 4, "db",
- "%d font files detected in %s.",
- nfound, dirname)
+ logreport ("both", 4, "db",
+ "%d font files detected in %s.",
+ nfound, dirname)
for j = 1, nfound do
local fullname = found[j]
files[#files + 1] = { path_normalize (fullname), location }
@@ -2140,23 +2155,29 @@ local collect_font_filenames_dir = function (dirname, location)
return files
end
-
--- string list -> string list
local filter_out_pwd = function (dirs)
local result = { }
+ if stripslashes == nil then
+ stripslashes = luaotfload.parsers and luaotfload.parsers.stripslashes
+ end
local pwd = path_normalize (lpegmatch (stripslashes,
lfscurrentdir ()))
for i = 1, #dirs do
--- better safe than sorry
local dir = path_normalize (lpegmatch (stripslashes, dirs[i]))
- if not (dir == "." or dir == pwd) then
+ if dir == "." or dir == pwd then
+ logreport ("both", 3, "db",
+ "Path “%s” matches $PWD (“%s”), skipping.",
+ dir, pwd)
+ else
result[#result+1] = dir
end
end
return result
end
-local path_separator = ostype == "windows" and ";" or ":"
+local path_separator = os.type == "windows" and ";" or ":"
--[[doc--
@@ -2176,14 +2197,14 @@ local collect_font_filenames_texmf = function ()
local osfontdir = kpseexpand_path "$OSFONTDIR"
if stringis_empty (osfontdir) then
- report ("info", 1, "db", "Scanning TEXMF for fonts...")
+ logreport ("both", 1, "db", "Scanning TEXMF for fonts...")
else
- report ("info", 1, "db", "Scanning TEXMF and $OSFONTDIR for fonts...")
+ logreport ("both", 1, "db", "Scanning TEXMF and $OSFONTDIR for fonts...")
if log.get_loglevel () > 3 then
local osdirs = filesplitpath (osfontdir)
- report ("info", 0, "db", "$OSFONTDIR has %d entries:", #osdirs)
+ logreport ("both", 0, "db", "$OSFONTDIR has %d entries:", #osdirs)
for i = 1, #osdirs do
- report ("info", 0, "db", "[%d] %s", i, osdirs[i])
+ logreport ("both", 0, "db", "[%d] %s", i, osdirs[i])
end
end
end
@@ -2197,14 +2218,14 @@ local collect_font_filenames_texmf = function ()
end
local tasks = filter_out_pwd (filesplitpath (fontdirs))
- report ("info", 3, "db",
- "Initiating scan of %d directories.", #tasks)
+ logreport ("both", 3, "db",
+ "Initiating scan of %d directories.", #tasks)
local files = { }
for _, dir in next, tasks do
files = tableappend (files, collect_font_filenames_dir (dir, "texmf"))
end
- report ("term", 3, "db", "Collected %d files.", #files)
+ logreport ("both", 3, "db", "Collected %d files.", #files)
return files
end
@@ -2225,7 +2246,10 @@ local function get_os_dirs ()
"/usr/local/etc/fonts/fonts.conf",
"/etc/fonts/fonts.conf",
}
- local os_dirs = read_fonts_conf(fonts_conves, find_files)
+ if not luaotfload.parsers then
+ logreport ("log", 0, "db", "Fatal: no fonts.conf parser.")
+ end
+ local os_dirs = luaotfload.parsers.read_fonts_conf(fonts_conves, find_files)
return os_dirs
end
return {}
@@ -2233,6 +2257,33 @@ end
--[[doc--
+ count_removed -- Count paths that do not exist in the file system.
+
+--doc]]--
+
+--- string list -> size_t
+local count_removed = function (files)
+ if not files or not files.full then
+ logreport ("log", 4, "db", "Empty file store; no data to work with.")
+ return 0
+ end
+ local old = files.full
+ logreport ("log", 4, "db", "Checking removed files.")
+ local nrem = 0
+ local nold = #old
+ for i = 1, nold do
+ local f = old[i]
+ if not kpsereadable_file (f) then
+ logreport ("log", 2, "db",
+ "File %s does not exist in file system.")
+ nrem = nrem + 1
+ end
+ end
+ return nrem
+end
+
+--[[doc--
+
retrieve_namedata -- Scan the list of collected fonts and populate
the list of namedata.
@@ -2245,13 +2296,13 @@ end
--doc]]--
---- string * string list -> dbobj -> dbobj -> bool? -> int
+--- string * string list -> dbobj -> dbobj -> bool? -> int * int
local retrieve_namedata = function (files, currentnames, targetnames, dry_run)
local nfiles = #files
local nnew = 0
- report ("info", 1, "db", "Scanning %d collected font files ...", nfiles)
+ logreport ("info", 1, "db", "Scanning %d collected font files ...", nfiles)
local bylocation = { texmf = { 0, 0 }
, ["local"] = { 0, 0 }
@@ -2264,12 +2315,12 @@ local retrieve_namedata = function (files, currentnames, targetnames, dry_run)
count[1] = count[1] + 1
if dry_run == true then
local truncated = truncate_string (fullname, 43)
- report ("log", 2, "db", "Would have been loading %s.", fullname)
+ logreport ("log", 2, "db", "Would have been loading %s.", fullname)
report_status ("term", "db", "Would have been loading %s", truncated)
--- skip the read_font_names part
else
local truncated = truncate_string (fullname, 32)
- report ("log", 2, "db", "Loading font %s.", fullname)
+ logreport ("log", 2, "db", "Loading font %s.", fullname)
report_status ("term", "db", "Loading font %s", truncated)
local new = read_font_names (fullname, currentnames,
targetnames, location)
@@ -2281,8 +2332,8 @@ local retrieve_namedata = function (files, currentnames, targetnames, dry_run)
end
report_status_stop ("term", "db", "Scanned %d files, %d new.", nfiles, nnew)
for location, count in next, bylocation do
- report ("term", 4, "db", " * %s: %d files, %d new",
- location, count[1], count[2])
+ logreport ("term", 4, "db", " * %s: %d files, %d new",
+ location, count[1], count[2])
end
return nnew
end
@@ -2291,15 +2342,15 @@ end
local collect_font_filenames_system = function ()
local n_scanned, n_new = 0, 0
- report ("info", 1, "db", "Scanning system fonts...")
- report ("info", 2, "db",
- "Searching in static system directories...")
+ logreport ("info", 1, "db", "Scanning system fonts...")
+ logreport ("info", 2, "db",
+ "Searching in static system directories...")
local files = { }
for _, dir in next, get_os_dirs () do
tableappend (files, collect_font_filenames_dir (dir, "system"))
end
- report ("term", 3, "db", "Collected %d files.", #files)
+ logreport ("term", 3, "db", "Collected %d files.", #files)
return files
end
@@ -2325,28 +2376,25 @@ end
--- unit -> string * string list
local collect_font_filenames_local = function ()
local pwd = lfscurrentdir ()
- report ("both", 1, "db", "Scanning for fonts in $PWD (%q) ...", pwd)
+ logreport ("both", 1, "db", "Scanning for fonts in $PWD (%q) ...", pwd)
local files = collect_font_filenames_dir (pwd, "local")
local nfiles = #files
if nfiles > 0 then
targetnames.meta["local"] = true --- prevent saving to disk
- report ("term", 1, "db", "Found %d files.", pwd)
+ logreport ("term", 1, "db", "Found %d files.", pwd)
else
- report ("term", 1, "db",
- "Couldn’t find a thing here. What a waste.", pwd)
+ logreport ("term", 1, "db",
+ "Couldn’t find a thing here. What a waste.", pwd)
end
- report ("term", 3, "db", "Collected %d files.", #files)
+ logreport ("term", 3, "db", "Collected %d files.", #files)
return files
end
---- dbobj -> dbobj -> int * int
-
--- fontentry list -> filemap
-
generate_filedata = function (mappings)
- report ("both", 2, "db", "Creating filename map.")
+ logreport ("both", 2, "db", "Creating filename map.")
local nmappings = #mappings
@@ -2375,7 +2423,6 @@ generate_filedata = function (mappings)
for index = 1, nmappings do
local entry = mappings [index]
-
local filedata = entry.file
local format
local location
@@ -2409,10 +2456,10 @@ generate_filedata = function (mappings)
if inbase then
local present = inbase [basename]
if present then
- report ("both", 4, "db",
- "Conflicting basename: %q already indexed \z
- in category %s, ignoring.",
- barename, location)
+ logreport ("both", 4, "db",
+ "Conflicting basename: %q already indexed \z
+ in category %s, ignoring.",
+ barename, location)
conflicts.basenames = conflicts.basenames + 1
--- track conflicts per font
@@ -2439,10 +2486,10 @@ generate_filedata = function (mappings)
if inbare then
local present = inbare [barename]
if present then
- report ("both", 4, "db",
- "Conflicting barename: %q already indexed \z
- in category %s/%s, ignoring.",
- barename, location, format)
+ logreport ("both", 4, "db",
+ "Conflicting barename: %q already indexed \z
+ in category %s/%s, ignoring.",
+ barename, location, format)
conflicts.barenames = conflicts.barenames + 1
--- track conflicts per font
@@ -2465,7 +2512,7 @@ generate_filedata = function (mappings)
--- 3) add to fullpath map
full [index] = fullpath
- end
+ end --- mapping traversal
return files
end
@@ -2624,7 +2671,7 @@ end
collect_families = function (mappings)
- report ("info", 2, "db", "Analyzing families.")
+ logreport ("info", 2, "db", "Analyzing families.")
local families = {
["local"] = { },
@@ -2720,7 +2767,7 @@ local style_categories = { "r", "b", "i", "bi" }
local bold_categories = { "b", "bi" }
group_modifiers = function (mappings, families)
- report ("info", 2, "db", "Analyzing shapes, weights, and styles.")
+ logreport ("info", 2, "db", "Analyzing shapes, weights, and styles.")
for location, location_data in next, families do
for format, format_data in next, location_data do
for familyname, collected in next, format_data do
@@ -2819,7 +2866,7 @@ end
order_design_sizes = function (families)
- report ("info", 2, "db", "Ordering design sizes.")
+ logreport ("info", 2, "db", "Ordering design sizes.")
for location, data in next, families do
for format, data in next, data do
@@ -2841,10 +2888,10 @@ end
--doc]]--
---- unit -> string * bool list
+--- unit -> string * string list
local collect_font_filenames = function ()
- report ("info", 4, "db", "Scanning the filesystem for font files.")
+ logreport ("info", 4, "db", "Scanning the filesystem for font files.")
local filenames = { }
local bisect = config.luaotfload.misc.bisect
@@ -2874,7 +2921,7 @@ end
--- int -> string
local nth_font_filename = function (n)
- report ("info", 4, "db", "Picking font file no. %d.", n)
+ logreport ("info", 4, "db", "Picking font file no. %d.", n)
if not p_blacklist then
read_blacklist ()
end
@@ -2889,7 +2936,7 @@ end
--doc]]--
local font_slice = function (lo, hi)
- report ("info", 4, "db", "Retrieving font files nos. %d--%d.", lo, hi)
+ logreport ("info", 4, "db", "Retrieving font files nos. %d--%d.", lo, hi)
if not p_blacklist then
read_blacklist ()
end
@@ -2911,7 +2958,7 @@ end
--- unit -> int
local count_font_files = function ()
- report ("info", 4, "db", "Counting font files.")
+ logreport ("info", 4, "db", "Counting font files.")
if not p_blacklist then
read_blacklist ()
end
@@ -3037,31 +3084,31 @@ local collect_statistics = function (mappings)
itemlist = tableconcat (itemlist, ", ")
end
- report ("both", 0, "db",
- " · %4d × %s.",
- freq, itemlist)
+ logreport ("both", 0, "db",
+ " · %4d × %s.",
+ freq, itemlist)
end
end
- report ("both", 0, "", "~~~~ font index statistics ~~~~")
- report ("both", 0, "db",
- " · Collected %d fonts (%d names) in %d families.",
- #mappings, n_fullname, n_family)
+ logreport ("both", 0, "", "~~~~ font index statistics ~~~~")
+ logreport ("both", 0, "db",
+ " · Collected %d fonts (%d names) in %d families.",
+ #mappings, n_fullname, n_family)
pprint_top (families, 4, true)
- report ("both", 0, "db",
- " · %d different “subfamily” kinds.",
- setsize (subfamily))
+ logreport ("both", 0, "db",
+ " · %d different “subfamily” kinds.",
+ setsize (subfamily))
pprint_top (subfamily, 4)
- report ("both", 0, "db",
- " · %d different “prefmodifiers” kinds.",
- setsize (prefmodifiers))
+ logreport ("both", 0, "db",
+ " · %d different “prefmodifiers” kinds.",
+ setsize (prefmodifiers))
pprint_top (prefmodifiers, 4)
- report ("both", 0, "db",
- " · %d different “fontstyle_name” kinds.",
- setsize (fontstyle_name))
+ logreport ("both", 0, "db",
+ " · %d different “fontstyle_name” kinds.",
+ setsize (fontstyle_name))
pprint_top (fontstyle_name, 4)
end
@@ -3090,10 +3137,12 @@ end
--- dbobj? -> bool? -> bool? -> dbobj
update_names = function (currentnames, force, dry_run)
local targetnames
+ local n_new = 0
+ local n_rem = 0
local conf = config.luaotfload
if conf.run.live ~= false and conf.db.update_live == false then
- report ("info", 2, "db", "Skipping database update.")
+ logreport ("info", 2, "db", "Skipping database update.")
--- skip all db updates
return currentnames or name_index
end
@@ -3105,15 +3154,16 @@ update_names = function (currentnames, force, dry_run)
- “targetnames” is the final table to return
- force is whether we rebuild it from scratch or not
]]
- report("both", 1, "db", "Updating the font names database"
- .. (force and " forcefully." or "."))
+ logreport ("both", 1, "db",
+ "Updating the font names database"
+ .. (force and " forcefully." or "."))
if config.luaotfload.db.skip_read == true then
--- the difference to a “dry run” is that we don’t search
--- for font files entirely. we also ignore the “force”
--- parameter since it concerns only the font files.
- report ("info", 2, "db",
- "Ignoring font files, reusing old data.")
+ logreport ("info", 2, "db",
+ "Ignoring font files, reusing old data.")
currentnames = load_names (false)
targetnames = currentnames
else
@@ -3124,8 +3174,9 @@ update_names = function (currentnames, force, dry_run)
currentnames = load_names (dry_run)
end
if currentnames.meta.version ~= names.version then
- report ("both", 1, "db", "No font names database or old "
- .. "one found; generating new one.")
+ logreport ("both", 1, "db",
+ "No font names database or old \z
+ one found; generating new one.")
currentnames = initialize_namedata (get_font_filter ())
end
end
@@ -3141,13 +3192,16 @@ update_names = function (currentnames, force, dry_run)
--- pass 2: read font files (normal case) or reuse information
--- present in index
+ n_rem = count_removed (currentnames.files)
+
n_new = retrieve_namedata (font_filenames,
currentnames,
targetnames,
dry_run)
- report ("info", 3, "db",
- "Found %d font files; %d new entries.",
- #font_filenames, n_new)
+
+ logreport ("info", 3, "db",
+ "Found %d font files; %d new, %d stale entries.",
+ #font_filenames, n_new, n_rem)
end
--- pass 3 (optional): collect some stats about the raw font info
@@ -3173,27 +3227,27 @@ update_names = function (currentnames, force, dry_run)
--- pass 7: order design size tables
targetnames.families = order_design_sizes (targetnames.families)
-
- report ("info", 3, "db",
- "Rebuilt in %0.f ms.",
- 1000 * (osgettimeofday () - starttime))
+ logreport ("info", 3, "db",
+ "Rebuilt in %0.f ms.",
+ 1000 * (osgettimeofday () - starttime))
name_index = targetnames
if dry_run ~= true then
- if n_new == 0 then
- report ("info", 2, "db", "No new fonts found, skip saving to disk.")
+ if n_new + n_rem == 0 then
+ logreport ("info", 2, "db",
+ "No new or removed fonts, skip saving to disk.")
else
local success, reason = save_names ()
if not success then
- report ("both", 0, "db",
- "Failed to save database to disk: %s",
- reason)
+ logreport ("both", 0, "db",
+ "Failed to save database to disk: %s",
+ reason)
end
end
if flush_lookup_cache () and save_lookups () then
- report ("both", 2, "cache", "Lookup cache emptied.")
+ logreport ("both", 2, "cache", "Lookup cache emptied.")
return targetnames
end
end
@@ -3210,18 +3264,18 @@ save_lookups = function ( )
caches.compile (lookup_cache, luaname, lucname)
--- double check ...
if lfsisfile (luaname) and lfsisfile (lucname) then
- report ("both", 3, "cache", "Lookup cache saved.")
+ logreport ("both", 3, "cache", "Lookup cache saved.")
return true
end
- report ("info", 0, "cache", "Could not compile lookup cache.")
+ logreport ("info", 0, "cache", "Could not compile lookup cache.")
return false
end
- report ("info", 0, "cache", "Lookup cache file not writable.")
+ logreport ("info", 0, "cache", "Lookup cache file not writable.")
if not fileiswritable (luaname) then
- report ("info", 0, "cache", "Failed to write %s.", luaname)
+ logreport ("info", 0, "cache", "Failed to write %s.", luaname)
end
if not fileiswritable (lucname) then
- report ("info", 0, "cache", "Failed to write %s.", lucname)
+ logreport ("info", 0, "cache", "Failed to write %s.", lucname)
end
return false
end
@@ -3250,33 +3304,33 @@ save_names = function (currentnames)
tabletofile (luaname, currentnames, true)
caches.compile (currentnames, luaname, lucname)
end
- report ("info", 2, "db", "Font index saved at ...")
+ logreport ("info", 2, "db", "Font index saved at ...")
local success = false
if lfsisfile (luaname) then
- report ("info", 2, "db", "Text: " .. luaname)
+ logreport ("info", 2, "db", "Text: " .. luaname)
success = true
end
if lfsisfile (gzname) then
- report ("info", 2, "db", "Gzip: " .. gzname)
+ logreport ("info", 2, "db", "Gzip: " .. gzname)
success = true
end
if lfsisfile (lucname) then
- report ("info", 2, "db", "Byte: " .. lucname)
+ logreport ("info", 2, "db", "Byte: " .. lucname)
success = true
end
if success then
return true
else
- report ("info", 0, "db", "Could not compile font index.")
+ logreport ("info", 0, "db", "Could not compile font index.")
return false
end
end
- report ("info", 0, "db", "Index file not writable")
+ logreport ("info", 0, "db", "Index file not writable")
if not fileiswritable (luaname) then
- report ("info", 0, "db", "Failed to write %s.", luaname)
+ logreport ("info", 0, "db", "Failed to write %s.", luaname)
end
if not fileiswritable (lucname) then
- report ("info", 0, "db", "Failed to write %s.", lucname)
+ logreport ("info", 0, "db", "Failed to write %s.", lucname)
end
return false
end
@@ -3290,7 +3344,7 @@ end
--- string -> string -> string list -> string list -> string list -> unit
local print_cache = function (category, path, luanames, lucnames, rest)
local report_indeed = function (...)
- report("info", 0, "cache", ...)
+ logreport ("info", 0, "cache", ...)
end
report_indeed("Luaotfload cache: %s", category)
report_indeed("location: %s", path)
@@ -3302,15 +3356,15 @@ end
--- string -> string -> string list -> bool -> bool
local purge_from_cache = function (category, path, list, all)
- report("info", 1, "cache", "Luaotfload cache: %s %s",
- (all and "erase" or "purge"), category)
- report("info", 1, "cache", "location: %s",path)
+ logreport ("info", 1, "cache", "Luaotfload cache: %s %s",
+ (all and "erase" or "purge"), category)
+ logreport ("info", 1, "cache", "location: %s", path)
local n = 0
for i=1,#list do
local filename = list[i]
if stringfind(filename,"luatex%-cache") then -- safeguard
if all then
- report("info", 5, "cache", "Removing %s.", filename)
+ logreport ("info", 5, "cache", "Removing %s.", filename)
osremove(filename)
n = n + 1
else
@@ -3319,7 +3373,7 @@ local purge_from_cache = function (category, path, list, all)
local checkname = file.replacesuffix(
filename, "lua", "luc")
if lfsisfile(checkname) then
- report("info", 5, "cache", "Removing %s.", filename)
+ logreport ("info", 5, "cache", "Removing %s.", filename)
osremove(filename)
n = n + 1
end
@@ -3327,7 +3381,7 @@ local purge_from_cache = function (category, path, list, all)
end
end
end
- report("info", 1, "cache", "Removed lua files : %i", n)
+ logreport ("info", 1, "cache", "Removed lua files : %i", n)
return true
end
@@ -3360,7 +3414,7 @@ end
local getwritablecachepath = function ( )
--- fonts.handlers.otf doesn’t exist outside a Luatex run,
--- so we have to improvise
- local writable = getwritablepath (config.luaotfload.paths.cache_dir)
+ local writable = getwritablepath (config.luaotfload.paths.cache_dir, "")
if writable then
return writable
end
@@ -3404,7 +3458,7 @@ local erase_cache = function ( )
end
local separator = function ( )
- report("info", 0, string.rep("-", 67))
+ logreport ("info", 0, string.rep("-", 67))
end
--- unit -> unit
@@ -3433,34 +3487,55 @@ end
--- export functionality to the namespace “fonts.names”
-----------------------------------------------------------------------
-names.set_font_filter = set_font_filter
-names.flush_lookup_cache = flush_lookup_cache
-names.save_lookups = save_lookups
-names.load = load_names
-names.access_font_index = access_font_index
-names.data = function () return name_index end
-names.save = save_names
-names.update = update_names
-names.font_file_lookup = font_file_lookup
-names.read_blacklist = read_blacklist
-names.sanitize_fontname = sanitize_fontname
-names.getfilename = resolve_fullpath
-names.getmetadata = getmetadata
-names.set_location_precedence = set_location_precedence
-names.count_font_files = count_font_files
-names.nth_font_filename = nth_font_filename
-names.font_slice = font_slice
-names.resolve_cached = resolve_cached
-names.resolve_name = resolve_name
-
---- font cache
-names.purge_cache = purge_cache
-names.erase_cache = erase_cache
-names.show_cache = show_cache
-
-names.find_closest = find_closest
-
--- for testing purpose
-names.read_fonts_conf = read_fonts_conf
+local export = {
+ set_font_filter = set_font_filter,
+ flush_lookup_cache = flush_lookup_cache,
+ save_lookups = save_lookups,
+ load = load_names,
+ access_font_index = access_font_index,
+ data = function () return name_index end,
+ save = save_names,
+ update = update_names,
+ lookup_font_file = lookup_font_file,
+ lookup_font_name = lookup_font_name,
+ lookup_font_name_cached = lookup_font_name_cached,
+ getfilename = lookup_fullpath,
+ lookup_fullpath = lookup_fullpath,
+ read_blacklist = read_blacklist,
+ sanitize_fontname = sanitize_fontname,
+ getmetadata = getmetadata,
+ set_location_precedence = set_location_precedence,
+ count_font_files = count_font_files,
+ nth_font_filename = nth_font_filename,
+ font_slice = font_slice,
+ --- font cache
+ purge_cache = purge_cache,
+ erase_cache = erase_cache,
+ show_cache = show_cache,
+ find_closest = find_closest,
+ -- for testing purpose
+}
+
+return {
+ init = function ()
+ --- the font loader namespace is “fonts”, same as in Context
+ --- we need to put some fallbacks into place for when running
+ --- as a script
+ if not fonts then return false end
+ logreport = luaotfload.log.report
+ local fonts = fonts
+ fonts.names = fonts.names or names
+ fonts.formats = fonts.formats or { }
+ fonts.definers = fonts.definers or { resolvers = { } }
+
+ names.blacklist = blacklist
+ names.version = 2.6
+ names.data = nil --- contains the loaded database
+ names.lookups = nil --- contains the lookup cache
+
+ for sym, ref in next, export do names[sym] = ref end
+ return true
+ end
+}
-- vim:tw=71:sw=4:ts=4:expandtab
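Nearly every hunk above swaps the old report helper for logreport, which keeps the call shape logreport (channel, verbosity, category, fmt, ...) with channel one of "info", "log", "term" or "both". The snippet below is only an illustrative stand-in with that signature, not part of the commit; the actual function comes from luaotfload.log.report, as wired up in the new luaotfload-init.lua later in this diff, so the output format here is an assumption.

local logreport = function (channel, verbosity, category, fmt, ...)
    --- sketch only: channel and verbosity are ignored here, whereas the
    --- real implementation uses them to route and filter messages
    local msg = fmt and string.format (fmt, ...) or ""
    print (string.format ("luaotfload | %s : %s", category or "?", msg))
end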
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-diagnostics.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-diagnostics.lua
index 80e461ccac5..a3a18418e61 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-diagnostics.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-diagnostics.lua
@@ -3,9 +3,7 @@
-- FILE: luaotfload-diagnostics.lua
-- DESCRIPTION: functionality accessible by the --diagnose option
-- REQUIREMENTS: luaotfload-tool.lua
--- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
--- VERSION: 2.5
--- MODIFIED: 2014-01-02 21:23:06+0100
+-- AUTHOR: Philipp Gesang <phg@phi-gamma.net>
-----------------------------------------------------------------------
--
local names = fonts.names
@@ -173,6 +171,11 @@ local get_permissions = function (t, location)
location = lpegmatch (stripslashes, location)
end
local attributes = lfsattributes (location)
+ if not attributes then
+ print""
+ print("attr", location, attributes)
+ os.exit()
+ end
if not attributes and t == "f" then
attributes = get_tentative_attributes (location)
@@ -240,12 +243,10 @@ local check_conformance = function (spec, permissions, errcnt)
return errcnt
end
-local desired_permissions
local init_desired_permissions = function ()
- inspect(config.luaotfload.paths)
local paths = config.luaotfload.paths
- desired_permissions = {
- { "d", {"r","w"}, function () return caches.getwritablepath () end },
+ return {
+ { "d", {"r","w"}, function () return caches.getwritablepath ("", "") end },
{ "d", {"r","w"}, paths.prefix },
{ "f", {"r","w"}, paths.index_path_lua .. ".gz" },
{ "f", {"r","w"}, paths.index_path_luc },
@@ -256,7 +257,7 @@ end
local check_permissions = function (errcnt)
out [[=============== file permissions ==============]]
- if not desired_permissions then init_desired_permissions () end
+ local desired_permissions = init_desired_permissions ()
for i = 1, #desired_permissions do
local t, spec, path = unpack (desired_permissions[i])
if type (path) == "function" then
@@ -655,27 +656,27 @@ local diagnose = function (job)
you may sleep well.")
return true, false
end
- out ( [[===============================================
- WARNING
- ===============================================
+ out ( [[===============================================
+ WARNING
+ ===============================================
- The diagnostic detected %d errors.
+ The diagnostic detected %d errors.
- This version of luaotfload may have been
- tampered with. Modified versions of the
- luaotfload source are unsupported. Read the log
- carefully and get a clean version from CTAN or
- github:
+ This version of luaotfload may have been
+ tampered with. Modified versions of the
+ luaotfload source are unsupported. Read the log
+ carefully and get a clean version from CTAN or
+ github:
- × http://www.ctan.org/pkg/luaotfload
- × https://github.com/lualatex/luaotfload/releases
+ × http://www.ctan.org/pkg/luaotfload
+ × https://github.com/lualatex/luaotfload/releases
- If you are uncertain as to how to proceed, then
- ask on the lualatex mailing list:
+ If you are uncertain as to how to proceed, then
+ ask on the lualatex mailing list:
- http://www.tug.org/mailman/listinfo/lualatex-dev
+ http://www.tug.org/mailman/listinfo/lualatex-dev
- ===============================================
+ ===============================================
]], errcnt)
return true, false
end
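In the reworked diagnostics above, init_desired_permissions now returns the permission specs instead of caching them in an upvalue. Each spec is a triple of { filetype, required modes, path-or-thunk }, and check_permissions resolves the third element when it is a function. The helper below is a hypothetical sketch of that resolution step, not code from the commit.

local resolve_location = function (path)
    --- the third element of a permission spec may be a thunk yielding the path
    if type (path) == "function" then
        return path ()
    end
    return path
end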
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua
index 9b895ce47e3..962806cc163 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua
@@ -1,5 +1,5 @@
if not modules then modules = { } end modules ["features"] = {
- version = "2.5",
+ version = "2.6",
comment = "companion to luaotfload-main.lua",
author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -921,24 +921,13 @@ end
---[[ end included font-ltx.lua ]]
---[[doc--
-This uses the code from luatex-fonts-merged (<- font-otc.lua) instead
-of the removed luaotfload-font-otc.lua.
-
-TODO find out how far we get setting features without these lines,
-relying on luatex-fonts only (it *does* handle features somehow, after
-all).
---doc]]--
-
--- we assume that the other otf stuff is loaded already
+-- We assume that the other otf stuff is loaded already; though there’s
+-- another check below during the initialization phase.
---[[ begin snippet from font-otc.lua ]]
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
local report_otf = logs.reporter("fonts","otf loading")
-local otf = fonts.handlers.otf
-local registerotffeature = otf.features.register
-
--[[HH--
In the userdata interface we can not longer tweak the loaded font as
@@ -960,7 +949,7 @@ setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key"
local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
local noflags = { }
-local function addfeature(data,feature,specifications)
+local function addfeature (data, feature, specifications)
local descriptions = data.descriptions
local resources = data.resources
local lookups = resources.lookups
@@ -1100,26 +1089,9 @@ local function addfeature(data,feature,specifications)
end
end
-
-otf.enhancers.addfeature = addfeature
-
-local extrafeatures = { }
-
-function otf.addfeature(name,specification)
- extrafeatures[name] = specification
-end
-
-local function enhance(data,filename,raw)
- for feature, specification in next, extrafeatures do
- addfeature(data,feature,specification)
- end
-end
-
-otf.enhancers.register("check extra features",enhance)
-
---[[ end snippet from font-otc.lua ]]
-local tlig = {
+local tlig_specification = {
{
type = "substitution",
features = everywhere,
@@ -1167,9 +1139,6 @@ local tlig = {
},
}
-otf.addfeature ("tlig", tlig)
-otf.addfeature ("trep", { })
-
local anum_arabic = { --- these are the same as in font-otc
[0x0030] = 0x0660,
[0x0031] = 0x0661,
@@ -1228,11 +1197,45 @@ local anum_specification = {
},
}
-otf.addfeature ("anum", anum_specification)
+return {
+ init = function ()
+
+ if not (fonts and fonts.handlers) then
+ logreport ("log", 0, "color",
+ "OTF mechanisms missing -- did you forget to \z
+ load a font loader?")
+ return false
+ end
+
+ local otf = fonts.handlers.otf
-registerotffeature {
- name = "anum",
- description = "arabic digits",
+ local extrafeatures = {
+ tlig = tlig_specification,
+ trep = { },
+ anum = anum_specification,
+ }
+
+ otf.enhancers.register ("check extra features",
+ function (data,filename, raw)
+ for feature, specification in next, extrafeatures do
+ addfeature (data, feature, specification)
+ end
+ end)
+
+ logreport = luaotfload.log.report
+ if not fonts then
+ logreport ("log", 0, "color",
+ "OTF mechanisms missing -- did you forget to \z
+ load a font loader?")
+ return false
+ end
+
+ otf.features.register {
+ name = "anum",
+ description = "arabic digits",
+ }
+ return true
+ end
}
-- vim:tw=71:sw=4:ts=4:expandtab
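The feature definitions above (tlig, trep, anum) are now gathered into an extrafeatures table inside init and applied through otf.enhancers.register. Purely as an illustration of the same shape — the feature name and the code points below are invented and not part of the commit — an additional entry could look like this:

local demo_specification = {
    {
        type     = "substitution",
        features = { ["*"] = { ["*"] = true } },  -- same as the “everywhere” table above
        data     = {
            [0x0041] = 0x0061,  -- hypothetical: substitute “A” with “a”
        },
    },
}
--- it would then be listed next to tlig/trep/anum in the extrafeatures table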
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua
deleted file mode 100644
index 9db94f65e48..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua
+++ /dev/null
@@ -1,68 +0,0 @@
-if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-local fonts = fonts
-local nodes = nodes
-
--- Fonts: (might move to node-gef.lua)
-
-local traverse_id = node.traverse_id
-local glyph_code = nodes.nodecodes.glyph
-
-function nodes.handlers.characters(head)
- local fontdata = fonts.hashes.identifiers
- if fontdata then
- local usedfonts, done, prevfont = { }, false, nil
- for n in traverse_id(glyph_code,head) do
- local font = n.font
- if font ~= prevfont then
- prevfont = font
- local used = usedfonts[font]
- if not used then
- local tfmdata = fontdata[font] --
- if tfmdata then
- local shared = tfmdata.shared -- we need to check shared, only when same features
- if shared then
- local processors = shared.processes
- if processors and #processors > 0 then
- usedfonts[font] = processors
- done = true
- end
- end
- end
- end
- end
- end
- if done then
- for font, processors in next, usedfonts do
- for i=1,#processors do
- local h, d = processors[i](head,font,0)
- head, done = h or head, done or d
- end
- end
- end
- return head, true
- else
- return head, false
- end
-end
-
-function nodes.simple_font_handler(head)
--- lang.hyphenate(head)
- head = nodes.handlers.characters(head)
- nodes.injections.handler(head)
- nodes.handlers.protectglyphs(head)
- head = node.ligaturing(head)
- head = node.kerning(head)
- return head
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-inj.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-inj.lua
deleted file mode 100644
index ae48150a6ca..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-inj.lua
+++ /dev/null
@@ -1,526 +0,0 @@
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
-
--- todo: make a special one for context
-
-local next = next
-local utfchar = utf.char
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("nodes","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-local nodepool = nodes.pool
-local newkern = nodepool.kern
-
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local a_kernpair = attributes.private('kernpair')
-local a_ligacomp = attributes.private('ligacomp')
-local a_markbase = attributes.private('markbase')
-local a_markmark = attributes.private('markmark')
-local a_markdone = attributes.private('markdone')
-local a_cursbase = attributes.private('cursbase')
-local a_curscurs = attributes.private('curscurs')
-local a_cursdone = attributes.private('cursdone')
-
--- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
--- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
--- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
--- that this code is not 100% okay but examples are needed to figure things out.
-
-function injections.installnewkern(nk)
- newkern = nk or newkern
-end
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in
--- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
--- can share tables.
-
--- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
--- checking with husayni (volt and fontforge).
-
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
-end
-
-function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
- end
- return x, y, w, h -- no bound
-end
-
-function injections.setkern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, dx }
- return dx, bound
- else
- return 0, 0
- end
-end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
- local index = 1
- if bound then
- local mb = marks[bound]
- if mb then
- -- if not index then index = #mb + 1 end
- index = #mb + 1
- mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
- return dx, dy, bound
- else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
- end
- end
--- index = index or 1
- index = index or 1
- bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
- marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
- return dx, dy, bound
-end
-
-local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
-end
-
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
- if kp then
- local k = kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
- else
- report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
- end
- end
- if mb then
- report_injections(" markbase: bound %a",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
- else
- report_injections(" markmark: bound %a, missing index",mm)
- end
- else
- m = m[1]
- report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
- end
- end
- if cb then
- report_injections(" cursbase: bound %a",cb)
- end
- if cc then
- local c = cursives[cc]
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
- end
- end
- end
- report_injections("end run")
-end
-
--- todo: reuse tables (i.e. no collection), but will be extra fields anyway
--- todo: check for attribute
-
--- We can have a fast test on a font being processed, so we can check faster for marks etc
--- but I'll make a context variant anyway.
-
-local function show_result(head)
- local current = head
- local skipping = false
- while current do
- local id = current.id
- if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
- skipping = false
- elseif id == kern_code then
- report_injections("kern: %p",current.kern)
- skipping = false
- elseif not skipping then
- report_injections()
- skipping = true
- end
- current = current.next
- end
-end
-
-function injections.handler(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
- else
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- end
- end
- end
- if nofvalid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,nofvalid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = n[a_cursbase]
- if p_cursbase then
- local n_curscurs = n[a_curscurs]
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p = valid[i]
- local p_markbase = p[a_markbase]
- if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
- if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- --
- local k = wx[p]
- if k then
- local x = k[2]
- local w = k[4]
- if w then
- if rlmode and rlmode >= 0 then
- -- kern(x) glyph(p) kern(w-x) mark(n)
- n.xoffset = p.xoffset - p.width + d[1] - (w-x)
- else
- -- kern(w-x) glyph(p) kern(x) mark(n)
- n.xoffset = p.xoffset - d[1] - x
- end
- else
- if rlmode and rlmode >= 0 then
- -- okay for husayni
- n.xoffset = p.xoffset - p.width + d[1]
- else
- -- needs checking: is x ok here?
- n.xoffset = p.xoffset - d[1] - x
- end
- end
- else
- if rlmode and rlmode >= 0 then
- n.xoffset = p.xoffset - p.width + d[1]
- else
- n.xoffset = p.xoffset - d[1]
- end
- local w = n.width
- if w ~= 0 then
- insert_node_before(head,n,newkern(-w/2))
- insert_node_after(head,n,newkern(-w/2))
- end
- end
- -- --
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- --
- if nofmarks == 1 then
- break
- else
- nofmarks = nofmarks - 1
- end
- end
- else
- -- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil (kernclasses), can be sped up when w == nil
- local x = k[2]
- local w = k[4]
- if w then
- local rl = k[1] -- r2l = k[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx)) -- type 0/2
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x)) -- type 0/2
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x)) -- type 0/2
- end
- if wx ~= 0 then
- insert_node_after (head,n,newkern(wx)) -- type 0/2
- end
- end
- elseif x ~= 0 then
- -- this needs checking for rl < 0 but it is unlikely that a r2l script
- -- uses kernclasses between glyphs so we're probably safe (KE has a
- -- problematic font where marks interfere with rl < 0 in the previous
- -- case)
- insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k)) -- type 0/2
- else
- insert_node_before(head,n,newkern(k)) -- type 0/2
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- -- local r2l = kk[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- else
- -- no tracing needed
- end
- return head, false
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-init.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-init.lua
new file mode 100644
index 00000000000..895e32ef5dd
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-init.lua
@@ -0,0 +1,573 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-init.lua
+-- DESCRIPTION: Luaotfload font loader initialization
+-- REQUIREMENTS: luatex v.0.80 or later; packages lualibs
+-- AUTHOR: Philipp Gesang (Phg), <phg@phi-gamma.net>
+-----------------------------------------------------------------------
+--
+
+local setmetatable = setmetatable
+local kpselookup = kpse.lookup
+
+--[[doc--
+
+ Initialization phases:
+
+ - Load Lualibs from package
+ - Set up the logger routines
+ - Load Fontloader
+ - as package specified in configuration
+ - from Context install
+ - (optional: from raw unpackaged files distributed with
+ Luaotfload)
+
+ The initialization of the Lualibs may also be made configurable in
+ the future, allowing either the individual files or the merged
+ package to be loaded depending on a configuration setting. However,
+ this would require separating out the configuration parser into a
+ self-contained package, which might be problematic due to its
+ current dependency on the Lualibs itself.
+
+--doc]]--
+
+local log --- filled in after loading the log module
+local logreport --- filled in after loading the log module
+
+--[[doc--
+
+ \subsection{Preparing the Font Loader}
+ We treat the fontloader as a semi-black box so behavior is
+ consistent between formats.
+ We load the fontloader code directly in the same fashion as the
+ Plain format \identifier{luatex-fonts} that is part of Context.
+ How this is executed depends on the presence on the
+ \emphasis{merged font loader code}.
+ In \identifier{luaotfload} this is contained in the file
+ \fileent{luaotfload-merged.lua}.
+ If this file cannot be found, the original libraries from \CONTEXT
+ of which the merged code was composed are loaded instead.
+ Since these files are not shipped with Luaotfload, an installation
+ of Context is required.
+ (Since we pull the fontloader directly from the Context minimals,
+ the necessary Context version is likely to be more recent than that
+ of other TeX distributions like Texlive.)
+ The imported font loader will call \luafunction{callback.register}
+ once while reading \fileent{font-def.lua}.
+ This is unavoidable unless we modify the imported files, but
+ harmless if we make it call a dummy instead.
+ However, this problem might vanish if we decide to do the merging
+ ourselves, like the \identifier{lualibs} package does.
+ With this step we would obtain the freedom to load our own
+ overrides in the process right where they are needed, at the cost
+ of losing encapsulation.
+ The decision on how to progress is currently on indefinite hold.
+
+--doc]]--
+
+
+local init_early = function ()
+
+ local store = { }
+ config = config or { } --- global
+ config.luaotfload = config.luaotfload or { }
+ config.lualibs = config.lualibs or { }
+ config.lualibs.verbose = false
+ config.lualibs.prefer_merged = true
+ config.lualibs.load_extended = true
+ fonts = fonts or { }
+
+ require "lualibs"
+
+ if not lualibs then error "this module requires Lualibs" end
+ if not luaotfload then error "this module requires Luaotfload" end
+
+ --[[doc--
+
+ The logger needs to be in place before the fontloader is loaded,
+ since the initialization order is crucial for the logger functions
+ that get swapped out.
+
+ --doc]]--
+
+ luaotfload.loaders.luaotfload "log"
+ log = luaotfload.log
+ logreport = log.report
+ log.set_loglevel (default_log_level)
+
+ logreport ("log", 4, "init", "Concealing callback.register().")
+ store.trapped_register = callback.register
+
+ callback.register = function (id)
+ logreport ("log", 4, "init",
+ "Dummy callback.register() invoked on %s.",
+ id)
+ end
+
+ --[[doc--
+
+ By default, the fontloader requires a number of \emphasis{private
+ attributes} for internal use.
+ These must be kept consistent with the attribute handling methods
+ as provided by \identifier{luatexbase}.
+ Our strategy is to override the function that allocates new
+ attributes before we initialize the font loader, making it a
+ wrapper around \luafunction{luatexbase.new_attribute}.\footnote{%
+ Many thanks, again, to Hans Hagen for making this part
+ configurable!
+ }
+ The attribute identifiers are prefixed “\fileent{luaotfload@}” to
+ avoid name clashes.
+
+ --doc]]--
+
+ local new_attribute = luatexbase.new_attribute
+ local the_attributes = luatexbase.attributes
+
+ attributes = attributes or { } --- this writes a global, sorry
+
+ attributes.private = function (name)
+ local attr = "luaotfload@" .. name --- used to be: “otfl@”
+ local number = the_attributes[attr]
+ if not number then number = new_attribute(attr) end
+ return number
+ end
+
+ luaotfload.loaders.fontloader "basics-gen"
+
+ return store
+end --- [init_early]
+
+--[[doc--
+
+ These next lines replicate the behavior of
+ \fileent{luatex-fonts.lua}.
+
+--doc]]--
+
+local push_namespaces = function ()
+ logreport ("log", 4, "init", "push namespace for font loader")
+ local normalglobal = { }
+ for k, v in next, _G do
+ normalglobal[k] = v
+ end
+ return normalglobal
+end
+
+local pop_namespaces = function (normalglobal,
+ isolate,
+ context_environment)
+ if normalglobal then
+ local _G = _G
+ local mode = "non-destructive"
+ if isolate then mode = "destructive" end
+ logreport ("log", 4, "init", "pop namespace from font loader -- " .. mode)
+ for k, v in next, _G do
+ if not normalglobal[k] then
+ context_environment[k] = v
+ if isolate then
+ _G[k] = nil
+ end
+ end
+ end
+ for k, v in next, normalglobal do
+ _G[k] = v
+ end
+ -- just to be sure:
+ setmetatable(context_environment, _G)
+ else
+ logreport ("both", 0, "init",
+ "irrecoverable error during pop_namespace: no globals to restore")
+ os.exit ()
+ end
+end
+
+--- below paths are relative to the texmf-context
+local ltx = "tex/generic/context/luatex"
+local ctx = "tex/context/base"
+
+local context_modules = {
+
+ --- Since 2.6 those are directly provided by the Lualibs package.
+ { false, "l-lua" },
+ { false, "l-lpeg" },
+ { false, "l-function" },
+ { false, "l-string" },
+ { false, "l-table" },
+ { false, "l-io" },
+ { false, "l-file" },
+ { false, "l-boolean" },
+ { false, "l-math" },
+ { false, "util-str" },
+
+ --- These constitute the fontloader proper.
+ { ltx, "luatex-basics-gen" },
+ { ctx, "data-con" },
+ { ltx, "luatex-basics-nod" },
+ { ctx, "font-ini" },
+ { ctx, "font-con" },
+ { ltx, "luatex-fonts-enc" },
+ { ctx, "font-cid" },
+ { ctx, "font-map" },
+ { ltx, "luatex-fonts-syn" },
+ { ltx, "luatex-fonts-tfm" },
+ { ctx, "font-oti" },
+ { ctx, "font-otf" },
+ { ctx, "font-otb" },
+ { ltx, "luatex-fonts-inj" }, --> since 2014-01-07, replaces node-inj.lua
+ { ltx, "luatex-fonts-ota" },
+ { ltx, "luatex-fonts-otn" }, --> since 2014-01-07, replaces font-otn.lua
+ { ctx, "font-otp" }, --> since 2013-04-23
+ { ltx, "luatex-fonts-lua" },
+ { ctx, "font-def" },
+ { ltx, "luatex-fonts-def" },
+ { ltx, "luatex-fonts-ext" },
+ { ltx, "luatex-fonts-cbk" },
+
+} --[[context_modules]]
+
+local load_context_modules = function (pth)
+
+ local load_module = luaotfload.loaders.context
+ local ignore_module = luaotfload.loaders.ignore
+
+ logreport ("both", 2, "init",
+ "Loading fontloader components from context.")
+ local n = #context_modules
+ for i = 1, n do
+ local sub, spec = unpack (context_modules [i])
+ if sub == false then
+ ignore_module (spec)
+ elseif type (sub) == "string" then
+ if pth then
+ load_module (spec, file.join (pth, sub))
+ else
+ load_module (spec)
+ end
+ else
+ logreport ("both", 0, "init",
+ "Internal error, please report. \z
+ This is not your fault.")
+ os.exit (-1)
+ end
+ end
+
+end
+
+local init_adapt = function ()
+
+ local context_environment = { }
+ local our_environment = push_namespaces ()
+
+ --[[doc--
+
+ The font loader requires that the attribute with index zero be
+ zero. We happily oblige.
+ (Cf. \fileent{luatex-fonts-nod.lua}.)
+
+ --doc]]--
+
+ tex.attribute[0] = 0
+
+ return our_environment, context_environment
+
+end --- [init_adapt]
+
+local init_main = function ()
+
+ local load_fontloader_module = luaotfload.loaders.fontloader
+ local ignore_module = luaotfload.loaders.ignore
+
+ --[[doc--
+
+ Now that things are sorted out we can finally load the
+ fontloader.
+
+ For less current distributions we ship the code from TL 2014, which
+ should be compatible with Luatex 0.76.
+
+ --doc]]--
+
+ local fontloader = config.luaotfload and config.luaotfload.run.fontloader
+ or "reference"
+ fontloader = tostring (fontloader)
+
+ if fontloader == "reference" then
+ logreport ("log", 4, "init", "Using reference fontloader.")
+ load_fontloader_module (luaotfload.fontloader_package)
+
+ elseif fontloader == "default" then
+ --- Same as above but loader name not correctly replaced by the file name
+ --- of our fontloader package. Perhaps something’s wrong with the status
+ --- file which contains the datestamped filename? In any case, it can’t
+ --- hurt reporting it as a bug.
+ logreport ("both", 0, "init", "Fontloader substitution failed, got “default”.")
+ logreport ("log", 4, "init", "Falling back to reference fontloader.")
+ load_fontloader_module (luaotfload.fontloader_package)
+
+ elseif fontloader == "unpackaged" then
+ logreport ("both", 4, "init",
+ "Loading fontloader components individually.")
+ --- The loading sequence is known to change, so this might have to be
+ --- updated with future updates. Do not modify it though unless there is
+ --- a change to the upstream package!
+
+ --- Since 2.6 those are directly provided by the Lualibs package.
+ ignore_module "l-lua"
+ ignore_module "l-lpeg"
+ ignore_module "l-function"
+ ignore_module "l-string"
+ ignore_module "l-table"
+ ignore_module "l-io"
+ ignore_module "l-file"
+ ignore_module "l-boolean"
+ ignore_module "l-math"
+ ignore_module "util-str"
+ ignore_module "luatex-basics-gen"
+
+ --- These constitute the fontloader proper.
+ load_fontloader_module "data-con"
+ load_fontloader_module "basics-nod"
+ load_fontloader_module "font-ini"
+ load_fontloader_module "font-con"
+ load_fontloader_module "fonts-enc"
+ load_fontloader_module "font-cid"
+ load_fontloader_module "font-map"
+ load_fontloader_module "fonts-syn"
+ load_fontloader_module "fonts-tfm"
+ load_fontloader_module "font-oti"
+ load_fontloader_module "font-otf"
+ load_fontloader_module "font-otb"
+ load_fontloader_module "fonts-inj" --> since 2014-01-07, replaces node-inj.lua
+ load_fontloader_module "fonts-ota"
+ load_fontloader_module "fonts-otn" --> since 2014-01-07, replaces font-otn.lua
+ load_fontloader_module "font-otp" --> since 2013-04-23
+ load_fontloader_module "fonts-lua"
+ load_fontloader_module "font-def"
+ load_fontloader_module "fonts-def"
+ load_fontloader_module "fonts-ext"
+ load_fontloader_module "fonts-cbk"
+
+ elseif fontloader == "context" then
+ logreport ("both", 2, "init",
+ "Attempting to load Context modules in lookup path.")
+ load_context_modules ()
+
+ elseif lfs.isdir (fontloader) then
+ logreport ("both", 2, "init",
+ "Attempting to load Context files under prefix “%s”.",
+ fontloader)
+ load_context_modules (fontloader)
+
+ elseif lfs.isfile (fontloader) then
+ logreport ("both", 2, "init",
+ "Attempting to load fontloader from absolute path “%s”.",
+ fontloader)
+ local _void = require (fontloader)
+
+ elseif kpselookup (fontloader) then
+ local path = kpselookup (fontloader)
+ logreport ("both", 2, "init",
+ "Attempting to load fontloader “%s” from kpse-resolved path “%s”.",
+ fontloader, path)
+ local _void = require (path)
+
+ elseif fontloader then
+ logreport ("log", 4, "init",
+ "Using predefined fontloader “%s”.",
+ fontloader)
+ load_fontloader_module (fontloader)
+
+ else
+ logreport ("log", 4, "init",
+ "No match for requested fontloader “%s”.",
+ fontloader)
+ fontloader = luaotfload.fontloader_package
+ logreport ("log", 4, "init",
+ "Defaulting to predefined fontloader “%s”.",
+ fontloader)
+ load_fontloader_module (fontloader)
+ end
+
+ ---load_fontloader_module "font-odv.lua" --- <= Devanagari support from Context
+
+ logreport ("log", 0, "init",
+ "Context OpenType loader version “%s”",
+ fonts.handlers.otf.version)
+end --- [init_main]
+
+local init_cleanup = function (store)
+ --- reinstate all the stuff we had to move out of the way to
+ --- accommodate the loader
+
+ --[[doc--
+
+ Here we adjust the globals created during font loader
+ initialization. If the second argument to
+ \luafunction{pop_namespaces()} is \verb|true| this will restore the
+ state of \luafunction{_G}, eliminating every global generated since
+ the last call to \luafunction{push_namespaces()}. At the moment we
+ see no reason to do this, and since the font loader is considered
+ an essential part of \identifier{luatex} as well as a very well
+ organized piece of code, we happily concede it the right to add to
+ \luafunction{_G} if needed.
+
+ --doc]]--
+
+ pop_namespaces (store.our_environment,
+ false,
+ store.context_environment)
+
+ --[[doc--
+
+ \subsection{Callbacks}
+ After the fontloader is ready we can restore the callback trap
+ from \identifier{luatexbase}.
+
+ --doc]]--
+
+ logreport ("log", 4, "init",
+ "Restoring original callback.register().")
+ callback.register = store.trapped_register
+end --- [init_cleanup]
+
+local init_post_install_callbacks = function ()
+ --[[doc--
+
+      We do our own callback handling with the means provided by
+      luatexbase.
+      Note: \luafunction{pre_linebreak_filter} and
+      \luafunction{hpack_filter} are coupled in \CONTEXT in the
+      concept of \emphasis{node processor}.
+
+ --doc]]--
+
+ luatexbase.add_to_callback("pre_linebreak_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+ luatexbase.add_to_callback("hpack_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+end
+
+local init_post_load_agl = function ()
+
+ --[[doc--
+
+ Adobe Glyph List.
+ -----------------------------------------------------------------
+
+ Context provides a somewhat different font-age.lua from an
+ unclear origin. Unfortunately, the file name it reads from is
+ hard-coded in font-enc.lua, so we have to replace the entire
+ table.
+
+ This shouldn’t cause any complications. Due to its implementation
+      the glyph list will be loaded upon loading an OTF or TTF for the
+ first time during a TeX run. (If one sticks to TFM/OFM then it is
+ never read at all.) For this reason we can install a metatable
+ that looks up the file of our choosing and only falls back to the
+ Context one in case it cannot be found.
+
+ --doc]]--
+
+ local findfile = resolvers.findfile
+ local encodings = fonts.encodings
+
+ if not findfile or not encodings then
+    --- Might happen during refactoring; we continue gracefully but in
+    --- a somewhat defective state.
+ logreport ("log", 0, "init",
+ "preconditions unmet, skipping the Adobe Glyph List; "
+ .. "this is a Luaotfload bug.")
+ return
+ end
+
+ if next (fonts.encodings.agl) then
+ --- unnecessary because the file shouldn’t be loaded at this time
+ --- but we’re just making sure
+ fonts.encodings.agl = nil
+ collectgarbage"collect"
+ end
+
+ local agl_init = { } --- start out empty, fill on demand
+ encodings.agl = agl_init --- ugh, replaced again later
+
+ setmetatable (agl_init, { __index = function (t, k)
+
+ if k ~= "unicodes" then
+ return nil
+ end
+
+ local glyphlist = findfile "luaotfload-glyphlist.lua"
+ if glyphlist then
+ logreport ("log", 1, "init", "loading the Adobe glyph list")
+ else
+ glyphlist = findfile "font-age.lua"
+ logreport ("both", 0, "init",
+ "loading the extended glyph list from ConTeXt")
+ end
+
+ if not glyphlist then
+ logreport ("both", 4, "init",
+ "Adobe glyph list not found, please check your installation.")
+ return nil
+ end
+ logreport ("both", 4, "init",
+ "found Adobe glyph list file at ``%s``, using that.",
+ glyphlist)
+
+ local unicodes = dofile(glyphlist)
+ encodings.agl = { unicodes = unicodes }
+ return unicodes
+ end })
+
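+  --- A minimal usage sketch (hypothetical helper, nothing in this module
+  --- calls it): the first access to the ``unicodes`` key triggers the
+  --- ``__index`` handler above, which loads the glyph list and replaces
+  --- ``encodings.agl`` wholesale; later lookups are plain table reads.
+  local demo_agl_lookup = function (glyphname)
+    local unicodes = encodings.agl.unicodes  --- loads the list on demand
+    return unicodes and unicodes [glyphname] --- e.g. "A" -> 0x0041
+  end
+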
+end
+
+--- (unit -> unit) list
+local init_post_actions = {
+ init_post_install_callbacks,
+ init_post_load_agl,
+}
+
+--- unit -> size_t
+local init_post = function ()
+ --- hook for actions that need to take place after the fontloader is
+ --- installed
+
+ local n = #init_post_actions
+ for i = 1, n do
+ local action = init_post_actions[i]
+ local taction = type (action)
+ if not action or taction ~= "function" then
+ logreport ("both", 1, "init",
+ "post hook WARNING: action %d not a function but %s/%s; ignoring.",
+ i, action, taction)
+ else
+ --- call closure
+ action ()
+ end
+ end
+
+ return n
+end --- [init_post]
+
+return {
+ early = init_early,
+ main = function (store)
+ local starttime = os.gettimeofday ()
+ store.our_environment, store.context_environment = init_adapt ()
+ init_main ()
+ init_cleanup (store)
+ logreport ("both", 1, "init",
+ "fontloader loaded in %0.3f seconds",
+ os.gettimeofday() - starttime)
+ local n = init_post ()
+ logreport ("both", 5, "init", "post hook terminated, %d actions performed", n)
+ return true
+ end
+}
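+
+--- Both fields are consumed from luaotfload-main.lua: ``init.early ()``
+--- runs before the configuration modules are set up, whereas
+--- ``init.main (store)`` performs the actual fontloader loading afterwards.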
+
+-- vim:tw=79:sw=2:ts=2:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-letterspace.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-letterspace.lua
index 20f29f53cb6..5fa25f9d343 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-letterspace.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-letterspace.lua
@@ -6,8 +6,11 @@ if not modules then modules = { } end modules ['letterspace'] = {
license = "see context related readme files"
}
+--- This code diverged quite a bit from its origin in Context. Please
+--- do *not* report bugs on the Context list.
+
local log = luaotfload.log
-local report = log.report
+local logreport = log.report
local getmetatable = getmetatable
local require = require
@@ -17,16 +20,28 @@ local tonumber = tonumber
local next = next
local nodes, node, fonts = nodes, node, fonts
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local copy_node = node.copy
-local new_node = node.new
-local insert_node_before = node.insert_before
-
-local nodepool = nodes.pool
-
+--- As of December 2014 the faster ``node.direct.*`` interface is
+--- preferred.
+local nodedirect = nodes.nuts
+local getchar = nodedirect.getchar
+local getfont = nodedirect.getfont
+local getid = nodedirect.getid
+local getnext = nodedirect.getnext
+local getprev = nodedirect.getprev
+local getfield = nodedirect.getfield
+local setfield = nodedirect.setfield
+local getsubtype = nodedirect.getsubtype
+local find_node_tail = nodedirect.tail
+local todirect = nodedirect.tonut
+local tonode = nodedirect.tonode
+
+local insert_node_before = nodedirect.insert_before
+local free_node = nodedirect.free
+local copy_node = nodedirect.copy
+local new_node = nodedirect.new
+
+local nodepool = nodedirect.pool
local new_kern = nodepool.kern
-local new_glue = nodepool.glue
local nodecodes = nodes.nodecodes
@@ -34,6 +49,7 @@ local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
local disc_code = nodecodes.disc
local math_code = nodecodes.math
+local glue_code = nodecodes.glue
local fonthashes = fonts.hashes
local chardata = fonthashes.characters
@@ -68,44 +84,51 @@ local kerncodes = bothways { [0] = "fontkern"
, [1] = "userkern"
, [2] = "accentkern"
}
+local skipcodes = bothways { [0] = "userskip"
+ , [13] = "spaceskip"
+ , [14] = "xspaceskip"
+ }
-kerncodes.kerning = kerncodes.fontkern --- idiosyncrasy
-local kerning_code = kerncodes.kerning
-local userkern_code = kerncodes.userkern
-
+kerncodes.kerning = kerncodes.fontkern --- idiosyncrasy
+local kerning_code = kerncodes.kerning
+local userkern_code = kerncodes.userkern
+local userskip_code = skipcodes.userskip
+local spaceskip_code = skipcodes.spaceskip
+local xspaceskip_code = skipcodes.xspaceskip
-----------------------------------------------------------------------
--- node-res
-----------------------------------------------------------------------
-nodes.pool = nodes.pool or { }
-local pool = nodes.pool
-
-local kern = new_node ("kern", kerncodes.userkern)
local glue_spec = new_node "glue_spec"
-pool.kern = function (k)
- local n = copy_node (kern)
- n.kern = k
- return n
+local new_gluespec = function (width,
+ stretch, shrink,
+ stretch_order, shrink_order)
+ local spec = copy_node(glue_spec)
+ if width then setfield(spec, "width" , width ) end
+ if stretch then setfield(spec, "stretch" , stretch ) end
+ if shrink then setfield(spec, "shrink" , shrink ) end
+ if stretch_order then setfield(spec, "stretch_order", stretch_order) end
+ if shrink_order then setfield(spec, "shrink_order" , shrink_order ) end
+ return spec
end
-pool.glue = function (width, stretch, shrink,
- stretch_order, shrink_order)
- local n = new_node"glue"
- if not width then
+local new_glue = function (width, stretch, shrink,
+ stretch_order, shrink_order)
+ local n = new_node "glue"
+ if not width then return n end
-- no spec
- elseif width == false or tonumber(width) then
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
- n.spec = s
+  if width == false or tonumber(width) then
+ local width = tonumber(width)
+ if width then
+ setfield(n, "spec",
+ new_gluespec(width, stretch, shrink,
+ stretch_order, shrink_order))
+ end
else
-- shared
- n.spec = copy_node(width)
+ setfield(n, "spec", copy_node(width))
end
return n
end
@@ -187,13 +210,28 @@ end
local kern_injector = function (fillup, kern)
if fillup then
local g = new_glue(kern)
- local s = g.spec
- s.stretch = kern
- s.stretch_order = 1
+ local s = getfield(g, "spec")
+ setfield(s, "stretch", kern)
+ setfield(s, "stretch_order", 1)
return g
- else
- return new_kern(kern)
end
+ return new_kern(kern)
+end
+
+local kernable_skip = function (n)
+ local st = getsubtype (n)
+ return st == userskip_code
+ or st == spaceskip_code
+ or st == xspaceskip_code
+end
+
+local function spec_injector (fillup, width, stretch, shrink)
+ if fillup then
+ local spec = new_gluespec(width, 2 * stretch, 2 * shrink)
+ setfield(spec, "stretch_order", 1)
+ return spec
+ end
+ return new_gluespec(width,stretch,shrink)
end
--[[doc--
@@ -219,16 +257,14 @@ kerncharacters = function (head)
local identifiers = fonthashes.identifiers
local kernfactors = kernfactors
-
local firstkern = true
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
-
--- 1) look up kern factor (slow, but cached rudimentarily)
local krn
- local fontid = start.font
+ local fontid = getfont(start)
do
krn = kernfactors[fontid]
if not krn then
@@ -249,7 +285,7 @@ kerncharacters = function (head)
goto nextnode
elseif firstkern then
firstkern = false
- if (id ~= disc_code) and (not start.components) then
+ if (id ~= disc_code) and (not getfield(start, "components")) then
--- not a ligature, skip node
goto nextnode
end
@@ -266,7 +302,7 @@ kerncharacters = function (head)
lastfont = fontid
--- 2) resolve ligatures
- local c = start.components
+ local c = getfield(start, "components")
if c then
if keepligature and keepligature(start) then
@@ -274,20 +310,20 @@ kerncharacters = function (head)
else
--- c = kerncharacters (c) --> taken care of after replacing
local s = start
- local p, n = s.prev, s.next
+          local p, n = getprev(s), getnext(s)
local tail = find_node_tail(c)
if p then
- p.next = c
- c.prev = p
+ setfield(p, "next", c)
+            setfield(c, "prev", p)
else
head = c
end
if n then
- n.prev = tail
+            setfield(n, "prev", tail)
end
- tail.next = n
+          setfield(tail, "next", n)
start = c
- s.components = nil
+ setfield(s, "components", nil)
-- we now leak nodes !
-- free_node(s)
done = true
@@ -295,33 +331,58 @@ kerncharacters = function (head)
end -- kern ligature
--- 3) apply the extra kerning
- local prev = start.prev
+ local prev = getprev(start)
if prev then
- local pid = prev.id
+ local pid = getid(prev)
if not pid then
-- nothing
+ elseif pid == glue_code and kernable_skip(prev) then
+ local spec = getfield(prev, "spec")
+ local wd = getfield(spec, "width")
+ if wd > 0 then
+ --- formula taken from Context
+ --- existing_width extended by four times the
+ --- width times the font’s kernfactor
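+            --- e.g. for a 10pt interword space and krn = 0.05 this yields
+            --- newwd = 10pt + 4 * 10pt * 0.05 = 12pt; stretch and shrink
+            --- are then rescaled below by the same ratio newwd/wd = 1.2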
+ local newwd = wd + --[[two en to a quad]] 4 * wd * krn
+ local stretched = (getfield(spec,"stretch") * newwd) / wd
+ local shrunk = (getfield(spec,"shrink") * newwd) / wd
+ setfield(prev, "spec",
+ spec_injector(fillup, newwd, stretched, shrunk))
+ done = true
+ end
+
elseif pid == kern_code then
- if prev.subtype == kerning_code --- context does this by means of an
- or prev.subtype == userkern_code --- attribute; we may need a test
+ local prev_subtype = getsubtype(prev)
+ if prev_subtype == kerning_code --- context does this by means of an
+ or prev_subtype == userkern_code --- attribute; we may need a test
then
- if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then
+
+ local pprev = getprev(prev)
+ local pprev_id = getid(pprev)
+
+ if keeptogether
+ and pprev_id == glyph_code
+ and keeptogether(pprev, start)
+ then
-- keep
else
- prev.subtype = userkern_code
- prev.kern = prev.kern + quaddata[lastfont]*krn -- here
+            setfield(prev, "subtype", userkern_code)
+            setfield(prev, "kern",
+                     getfield(prev, "kern") + quaddata[lastfont] * krn)
done = true
end
end
elseif pid == glyph_code then
- if prev.font == lastfont then
- local prevchar, lastchar = prev.char, start.char
- if keeptogether and keeptogether(prev,start) then
+ if getfont(prev) == lastfont then
+ local prevchar = getchar(prev)
+ local lastchar = getchar(start)
+ if keeptogether and keeptogether(prev, start) then
-- keep 'm
elseif identifiers[lastfont] then
- local kerns = chardata[lastfont][prevchar].kerns
+ local kerns = chardata[lastfont] and chardata[lastfont][prevchar].kerns
local kern = kerns and kerns[lastchar] or 0
krn = kern + quaddata[lastfont]*krn -- here
insert_node_before(head,start,kern_injector(fillup,krn))
@@ -337,31 +398,34 @@ kerncharacters = function (head)
-- a bit too complicated, we can best not copy and just calculate
-- but we could have multiple glyphs involved so ...
local disc = prev -- disc
- local pre, post, replace = disc.pre, disc.post, disc.replace
- local prv, nxt = disc.prev, disc.next
+ local pre = getfield(disc, "pre")
+ local post = getfield(disc, "post")
+ local replace = getfield(disc, "replace")
+ local prv = getprev(disc)
+ local nxt = getnext(disc)
if pre and prv then -- must pair with start.prev
-- this one happens in most cases
local before = copy_node(prv)
- pre.prev = before
- before.next = pre
- before.prev = nil
+ setfield(pre, "prev", before)
+ setfield(before, "next", pre)
+ setfield(before, "prev", nil)
pre = kerncharacters (before)
- pre = pre.next
- pre.prev = nil
- disc.pre = pre
+ pre = getnext(pre)
+ setfield(pre, "prev", nil)
+ setfield(disc, "pre", pre)
free_node(before)
end
if post and nxt then -- must pair with start
local after = copy_node(nxt)
local tail = find_node_tail(post)
- tail.next = after
- after.prev = tail
- after.next = nil
+ setfield(tail, "next", after)
+ setfield(after, "prev", tail)
+ setfield(after, "next", nil)
post = kerncharacters (post)
- tail.next = nil
- disc.post = post
+ setfield(tail, "next", nil)
+ setfield(disc, "post", post)
free_node(after)
end
@@ -369,38 +433,42 @@ kerncharacters = function (head)
local before = copy_node(prv)
local after = copy_node(nxt)
local tail = find_node_tail(replace)
- replace.prev = before
- before.next = replace
- before.prev = nil
- tail.next = after
- after.prev = tail
- after.next = nil
+ setfield(replace, "prev", before)
+ setfield(before, "next", replace)
+ setfield(before, "prev", nil)
+ setfield(tail, "next", after)
+ setfield(after, "prev", tail)
+ setfield(after, "next", nil)
replace = kerncharacters (before)
- replace = replace.next
- replace.prev = nil
- after.prev.next = nil
- disc.replace = replace
+ replace = getnext(replace)
+ setfield(replace, "prev", nil)
+ setfield(after, "prev.next", nil)
+ setfield(disc, "replace", replace)
free_node(after)
free_node(before)
+
elseif identifiers[lastfont] then
- if prv and prv.id == glyph_code and prv.font == lastfont then
- local prevchar, lastchar = prv.char, start.char
- local kerns = chardata[lastfont][prevchar].kerns
+ if prv
+ and getid(prv) == glyph_code
+ and getfont(prv) == lastfont
+ then
+ local prevchar = getchar(prv)
+ local lastchar = getchar(start)
+ local kerns = chardata[lastfont] and chardata[lastfont][prevchar].kerns
local kern = kerns and kerns[lastchar] or 0
krn = kern + quaddata[lastfont]*krn -- here
else
krn = quaddata[lastfont]*krn -- here
end
- disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
- end
-
- end
- end
- end
+ setfield(disc, "replace", kern_injector(false, krn))
+ end --[[if replace and prv and nxt]]
+ end --[[if not pid]]
+ end --[[if prev]]
+ end --[[if id == glyph_code]]
::nextnode::
if start then
- start = start.next
+ start = getnext(start)
end
end
return head, done
@@ -439,19 +507,40 @@ local remove_processor = function (name)
return false --> unregistered
end
---- now for the simplistic variant
+--- When font kerning is requested, usually by defining a font with the
+--- ``letterspace`` parameter, we inject a wrapper for the
+--- ``kerncharacters()`` node processor in the relevant callbacks. This
+--- wrapper initially converts the received head node into its “direct”
+--- counterpart. Likewise, the callback result is converted back to an
+--- ordinary node prior to returning. Internally, ``kerncharacters()``
+--- performs all node operations on direct nodes.
+
--- unit -> bool
local enablefontkerning = function ( )
- return add_processor( kerncharacters
+
+ local handler = function (hd)
+ local direct_hd = todirect (hd)
+ logreport ("term", 5, "letterspace",
+ "kerncharacters() invoked with node.direct interface \z
+ (``%s`` -> ``%s``)", tostring (hd), tostring (direct_hd))
+ local direct_hd, _done = kerncharacters (direct_hd)
+ if not direct_hd then --- bad
+ logreport ("both", 0, "letterspace",
+ "kerncharacters() failed to return a valid new head")
+ end
+ return tonode (direct_hd)
+ end
+
+ return add_processor( handler
, "luaotfload.letterspace"
, "pre_linebreak_filter"
, "hpack_filter")
end
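+
+--- Usage sketch (request syntax assumed here; cf. the ``letterspace``
+--- feature registered further below): letterspacing is activated by
+--- requesting a font with the letterspace parameter, e.g.
+---
+---     \font\spaced = "file:texgyrepagella-regular.otf:letterspace=6" at 10pt
+---
+--- where the value is interpreted as a percentage of the font’s em width.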
--- unit -> bool
-local disablefontkerning = function ( )
- return remove_processor "luaotfload.letterspace"
-end
+--- local disablefontkerning = function ( )
+---   return remove_processor "luaotfload.letterspace"
+--- end
--[[doc--
@@ -515,10 +604,10 @@ otffeatures.register {
local initializecompatfontkerning = function (tfmdata, percentage)
local factor = tonumber (percentage)
if not factor then
- report ("both", 0, "letterspace",
- "Invalid argument to letterspace: %s (type %q), " ..
- "was expecting percentage as Lua number instead.",
- percentage, type (percentage))
+ logreport ("both", 0, "letterspace",
+ "Invalid argument to letterspace: %s (type %q), " ..
+ "was expecting percentage as Lua number instead.",
+ percentage, type (percentage))
return
end
return initializefontkerning (tfmdata, factor * 0.01)
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua
index 2aa8c7c4f92..0f22f46bb49 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua
@@ -1,30 +1,161 @@
-if not modules then modules = { } end modules ["loaders"] = {
- version = "2.5",
- comment = "companion to luaotfload-main.lua",
- author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-loaders.lua
+-- DESCRIPTION: Luaotfload callback handling
+-- REQUIREMENTS: luatex v.0.80 or later; package lualibs
+-- AUTHOR: Philipp Gesang <phg@phi-gamma.net>
+-- AUTHOR: Hans Hagen, Khaled Hosny, Elie Roux, David Carlisle
+-----------------------------------------------------------------------
+--
+--- Contains parts of the earlier main script.
+
+if not lualibs then error "this module requires Luaotfload" end
+if not luaotfload then error "this module requires Luaotfload" end
+
+local logreport = luaotfload.log and luaotfload.log.report or print
+
+local install_formats = function ()
+ local fonts = fonts
+ if not fonts then return false end
+
+ local readers = fonts.readers
+ local handlers = fonts.handlers
+ local formats = fonts.formats
+ if not readers or not handlers or not formats then return false end
+
+ local aux = function (which, reader)
+ if not which or type (which) ~= "string"
+ or not reader or type (reader) ~= "function" then
+ logreport ("both", 2, "loaders", "Error installing reader for “%s”.", which)
+ return false
+ end
+ formats [which] = "type1"
+ readers [which] = reader
+ handlers [which] = { }
+ return true
+ end
+
+ return aux ("pfa", function (spec) return readers.opentype (spec, "pfa", "type1") end)
+ and aux ("pfb", function (spec) return readers.opentype (spec, "pfb", "type1") end)
+ and aux ("ofm", readers.tfm)
+end
+
+--[[doc--
+
+ \subsection{\CONTEXT override}
+ \label{define-font}
+ We provide a simplified version of the original font definition
+ callback.
+
+--doc]]--
-local fonts = fonts
-local readers = fonts.readers
-local handlers = fonts.handlers
-local formats = fonts.formats
-
-local pfb_reader = function (specification)
- return readers.opentype (specification, "pfb", "type1")
-end
-
-local pfa_reader = function (specification)
- return readers.opentype (specification, "pfa", "type1")
+
+local definers --- (string, spec -> size -> id -> tfmdata) hash_t
+do
+ local read = fonts.definers.read
+
+ local patch = function (specification, size, id)
+ local fontdata = read (specification, size, id)
+ if type (fontdata) == "table" and fontdata.shared then
+ --- We need to test for the “shared” field here
+ --- or else the fontspec capheight callback will
+ --- operate on tfm fonts.
+ luatexbase.call_callback ("luaotfload.patch_font", fontdata, specification)
+ else
+ luatexbase.call_callback ("luaotfload.patch_font_unsafe", fontdata, specification)
+ end
+ return fontdata
+ end
+
+ local mk_info = function (name)
+ local definer = name == "patch" and patch or read
+ return function (specification, size, id)
+ logreport ("both", 0, "loaders", "defining font no. %d", id)
+ logreport ("both", 0, "loaders", " > active font definer: %q", name)
+ logreport ("both", 0, "loaders", " > spec %q", specification)
+ logreport ("both", 0, "loaders", " > at size %.2f pt", size / 2^16)
+ local result = definer (specification, size, id)
+ if not result then
+ logreport ("both", 0, "loaders", " > font definition failed")
+ return
+ elseif type (result) == "number" then
+ logreport ("both", 0, "loaders", " > font definition yielded id %d", result)
+ return result
+ end
+ logreport ("both", 0, "loaders", " > font definition successful")
+ logreport ("both", 0, "loaders", " > name %q", result.name or "<nil>")
+ logreport ("both", 0, "loaders", " > fontname %q", result.fontname or "<nil>")
+ logreport ("both", 0, "loaders", " > fullname %q", result.fullname or "<nil>")
+ return result
+ end
+ end
+
+ definers = {
+ patch = patch,
+ generic = read,
+ info_patch = mk_info "patch",
+ info_generic = mk_info "generic",
+ }
end
-formats.pfa = "type1"
-readers.pfa = pfa_reader
-handlers.pfa = { }
+--[[doc--
+
+ We create callbacks for patching fonts on the fly, to be used by
+ other packages. In addition to the regular \identifier{patch_font}
+ callback there is an unsafe variant \identifier{patch_font_unsafe}
+ that will be invoked even if the target font lacks certain essential
+ tfmdata tables.
+
+ The callbacks initially contain the empty function that we are going
+ to override below.
-formats.pfb = "type1"
-readers.pfb = pfb_reader
-handlers.pfb = { }
+--doc]]--
--- vim:tw=71:sw=2:ts=2:expandtab
+local purge_define_font = function ()
+ local cdesc = luatexbase.callback_descriptions "define_font"
+ --- define_font is an “exclusive” callback, meaning that there can
+ --- only ever be one entry. Everything beyond that would indicate
+ --- that something is broken.
+ local _, d = next (cdesc)
+ if d then
+ local i, d2 = next (cdesc, 1)
+ if d2 then --> issue warning
+ logreport ("both", 0, "loaders",
+ "Callback table for define_font contains multiple entries: \z
+ { [%d] = “%s” } -- seems fishy.", i, d2)
+ end
+ logreport ("log", 0, "loaders",
+ "Entry ``%s`` present in define_font callback; overriding.", d)
+ luatexbase.remove_from_callback ("define_font", d)
+ end
+end
+
+local install_callbacks = function ()
+ local create_callback = luatexbase.create_callback
+ local dummy_function = function () end
+ create_callback ("luaotfload.patch_font", "simple", dummy_function)
+ create_callback ("luaotfload.patch_font_unsafe", "simple", dummy_function)
+ purge_define_font ()
+ local definer = config.luaotfload.run.definer
+ luatexbase.add_to_callback ("define_font",
+ definers[definer or "patch"],
+ "luaotfload.define_font",
+ 1)
+ return true
+end
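+
+--[[doc--
+
+    A usage sketch for client packages (hypothetical code, not part of this
+    module): a handler attached to the \identifier{patch_font} callback
+    receives the tfmdata table and the original request specification, e.g.
+
+      luatexbase.add_to_callback ("luaotfload.patch_font",
+                                  function (fontdata, specification)
+                                    --- inspect or adjust fontdata here
+                                  end,
+                                  "demo.patch_font")
+
+--doc]]--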
+
+return {
+ init = function ()
+ local ret = true
+ if not install_formats () then
+ logreport ("log", 0, "loaders", "Error initializing OFM/PF{A,B} loaders.")
+ ret = false
+ end
+ if not install_callbacks () then
+ logreport ("log", 0, "loaders", "Error installing font loader callbacks.")
+ ret = false
+ end
+ return ret
+ end
+}
+-- vim:tw=79:sw=2:ts=2:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-log.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-log.lua
index a0e78bde9cc..5c678a56b44 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-log.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-log.lua
@@ -1,5 +1,5 @@
if not modules then modules = { } end modules ["luaotfload-log"] = {
- version = "2.5",
+ version = "2.6",
comment = "companion to Luaotfload",
author = "Khaled Hosny, Elie Roux, Philipp Gesang",
copyright = "Luaotfload Development Team",
@@ -91,7 +91,7 @@ local set_logout = function (s, finalizers)
logout = "redirect"
local chan = choose_logfile ()
chan:write (stringformat ("logging initiated at %s",
- osdate ("%Y-%m-%d %h:%m:%s", --- i. e. osdate "%F %T"
+ osdate ("%Y-%m-%d %H:%M:%S", --- i. e. osdate "%F %T"
ostime ())))
local writefile = function (...)
if select ("#", ...) == 2 then
@@ -118,7 +118,7 @@ local set_logout = function (s, finalizers)
finalizers[#finalizers+1] = function ()
chan:write (stringformat ("\nlogging finished at %s\n",
- osdate ("%Y-%m-%d %h:%m:%s", --- i. e. osdate "%F %T"
+ osdate ("%Y-%m-%d %H:%M:%S", --- i. e. osdate "%F %T"
ostime ())))
chan:close ()
texiowrite = texio.write
@@ -355,52 +355,3 @@ end
texio.reporter = texioreporter
---[[doc--
-
- Adobe Glyph List.
- -------------------------------------------------------------------
-
- Context provides a somewhat different font-age.lua from an unclear
- origin. Unfortunately, the file name it reads from is hard-coded
- in font-enc.lua, so we have to replace the entire table.
-
- This shouldn’t cause any complications. Due to its implementation
- the glyph list will be loaded upon loading a OTF or TTF for the
- first time during a TeX run. (If one sticks to TFM/OFM then it is
- never read at all.) For this reason we can install a metatable that
- looks up the file of our choosing and only falls back to the
- Context one in case it cannot be found.
-
---doc]]--
-
-if fonts then --- need to be running TeX
- if next(fonts.encodings.agl) then
- --- unnecessary because the file shouldn’t be loaded at this time
- --- but we’re just making sure
- fonts.encodings.agl = nil
- collectgarbage"collect"
- end
-
-
- fonts.encodings.agl = { }
-
- setmetatable(fonts.encodings.agl, { __index = function (t, k)
- if k == "unicodes" then
- local glyphlist = resolvers.findfile"luaotfload-glyphlist.lua"
- if glyphlist then
- report ("log", 1, "load", "loading the Adobe glyph list")
- else
- glyphlist = resolvers.findfile"font-age.lua"
- report ("both", 0, "load",
- "loading the extended glyph list from ConTeXt")
- end
- local unicodes = dofile(glyphlist)
- fonts.encodings.agl = { unicodes = unicodes }
- return unicodes
- else
- return nil
- end
- end })
-end
-
--- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-main.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-main.lua
index ed88f791186..9525ca4b9d4 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-main.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-main.lua
@@ -1,26 +1,41 @@
-----------------------------------------------------------------------
-- FILE: luaotfload-main.lua
--- DESCRIPTION: Luatex fontloader initialization
--- REQUIREMENTS: luatex v.0.79 or later; packages lualibs, luatexbase
+-- DESCRIPTION: Luaotfload entry point
+-- REQUIREMENTS: luatex v.0.80 or later; packages lualibs
-- AUTHOR: Élie Roux, Khaled Hosny, Philipp Gesang
--- VERSION: same as Luaotfload
--- MODIFIED: 2014-08-10 23:12:07+0200
-----------------------------------------------------------------------
--
---- Note:
---- This file was part of the original luaotfload.dtx and has been
---- converted to a pure Lua file during the transition from Luaotfload
---- version 2.4 to 2.5. Thus, the comments are still in TeX (Latex)
---- markup.
-
-if not modules then modules = { } end modules ["luaotfload-main"] = {
- version = "2.5",
- comment = "fontloader initialization",
- author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "GNU General Public License v. 2.0"
-}
+local osgettimeofday = os.gettimeofday
+config = config or { }
+luaotfload = luaotfload or { }
+local luaotfload = luaotfload
+luaotfload.log = luaotfload.log or { }
+luaotfload.version = "2.6"
+luaotfload.loaders = { }
+luaotfload.min_luatex_version  = 80       --- i. e. 0.80
+luaotfload.fontloader_package = "reference" --- default: from current Context
+
+local authors = "\z
+    Hans Hagen, \z
+    Khaled Hosny, \z
+    Elie Roux, \z
+    Will Robertson, \z
+    Philipp Gesang, \z
+    Dohyun Kim, \z
+ Reuben Thomas\z
+"
+
+
+luaotfload.module = {
+ name = "luaotfload-main",
+ version = 2.60001,
+ date = "2015/12/09",
+ description = "OpenType layout system.",
+ author = authors,
+ copyright = authors,
+ license = "GPL v2.0"
+}
--[[doc--
@@ -43,49 +58,11 @@ if not modules then modules = { } end modules ["luaotfload-main"] = {
--doc]]--
-local initial_log_level = 0
-
-luaotfload = luaotfload or { }
-local luaotfload = luaotfload
-luaotfload.log = luaotfload.log or { }
-luaotfload.version = "2.5-4" -- FIXME version belongs in common init
-
-luaotfload.module = {
- name = "luaotfload-main",
- version = 2.50003,
- date = "2014/08/10",
- description = "OpenType layout system.",
- author = "Elie Roux & Hans Hagen",
- copyright = "Elie Roux",
- license = "GPL v2.0"
-}
-
local luatexbase = luatexbase
+local require = require
+local type = type
-local setmetatable = setmetatable
-local type, next = type, next
-local stringlower = string.lower
-local stringformat = string.format
-
-local kpsefind_file = kpse.find_file
-local lfsisfile = lfs.isfile
-
-local add_to_callback = luatexbase.add_to_callback
-local create_callback = luatexbase.create_callback
-local reset_callback = luatexbase.reset_callback
-local call_callback = luatexbase.call_callback
-
-local dummy_function = function () end --- XXX this will be moved to the luaotfload namespace when we have the init module
-
-local error, warning, info, log =
- luatexbase.provides_module(luaotfload.module)
-
-luaotfload.log.tex = {
- error = error,
- warning = warning,
- info = info,
- log = log,
-}
+luatexbase.provides_module (luaotfload.module)
--[[doc--
@@ -101,609 +78,190 @@ luaotfload.log.tex = {
--doc]]--
-local min_luatex_version = 79
-
-if tex.luatexversion < min_luatex_version then
+if tex.luatexversion < luaotfload.min_luatex_version then
warning ("LuaTeX v%.2f is old, v%.2f or later is recommended.",
tex.luatexversion / 100,
- min_luatex_version / 100)
+ luaotfload.min_luatex_version / 100)
+ warning ("using fallback fontloader -- newer functionality not available")
+ luaotfload.fontloader_package = "tl2014" --- TODO fallback should be configurable too
--- we install a fallback for older versions as a safety
- if not node.end_of_math then
- local math_t = node.id "math"
- local traverse_nodes = node.traverse_id
- node.end_of_math = function (n)
- for n in traverse_nodes (math_t, n.next) do
- return n
- end
- end
- end
end
--[[doc--
\subsection{Module loading}
- We load the files imported from \CONTEXT with this function. It
- automatically prepends the prefix \fileent{luaotfload-} to its
- argument, so we can refer to the files with their actual \CONTEXT
- name.
-
---doc]]--
-
-local fl_prefix = "luaotfload" -- “luatex” for luatex-plain
-local loadmodule = function (name)
- require (fl_prefix .."-"..name)
-end
-
-loadmodule "log.lua" --- log messages
---loadmodule "parsers.lua" --- new in 2.5; fonts.conf and syntax
---loadmodule "configuration.lua" --- configuration options
-
-local log = luaotfload.log
-local logreport = log.report
-
-log.set_loglevel (default_log_level)
-
---[[doc--
-
- Before \TeX Live 2013 version, \LUATEX had a bug that made ofm fonts
- fail when called with their extension. There was a side-effect making
- ofm totally unloadable when luaotfload was present. The following
- lines are a patch for this bug. The utility of these lines is
- questionable as they are not necessary since \TeX Live 2013. They
- should be removed in the next version.
+  We load the files imported from \CONTEXT with loader functions derived
+  below. Each loader automatically prepends a prefix to its argument, so we
+  can refer to the files by their actual \CONTEXT names.
--doc]]--
-local Cs, P, lpegmatch = lpeg.Cs, lpeg.P, lpeg.match
-
-local p_dot, p_slash = P".", P"/"
-local p_suffix = (p_dot * (1 - p_dot - p_slash)^1 * P(-1)) / ""
-local p_removesuffix = Cs((p_suffix + 1)^1)
-
-local find_vf_file = function (name)
- local fullname = kpsefind_file(name, "ovf")
- if not fullname then
- --fullname = kpsefind_file(file.removesuffix(name), "ovf")
- fullname = kpsefind_file(lpegmatch(p_removesuffix, name), "ovf")
+local make_loader_name = function (prefix, name)
+ local msg = luaotfload.log and luaotfload.log.report
+ or function (stream, lvl, cat, ...)
+ if lvl > 1 then --[[not pressing]] return end
+ texio.write_nl ("log",
+ string.format ("luaotfload | %s : ",
+ tostring (cat)))
+ texio.write (string.format (...))
+ end
+ if not name then
+ msg ("both", 0, "load",
+ "Fatal error: make_loader_name (“%s”, “%s”).",
+ tostring (prefix), tostring (name))
+ return "dummy-name"
end
- if fullname then
- logreport ("log", 0, "main",
- "loading virtual font file %s.", fullname)
+ name = tostring (name)
+ if prefix == false then
+ msg ("log", 9, "load",
+ "No prefix requested, passing module name “%s” unmodified.",
+ name)
+ return tostring (name) .. ".lua"
end
- return fullname
+ prefix = tostring (prefix)
+ msg ("log", 9, "load",
+ "Composing module name from constituents %s, %s.",
+ prefix, name)
+ return prefix .. "-" .. name .. ".lua"
end
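+
+--- For illustration (derived from the rules above):
+---
+---     make_loader_name ("luaotfload", "init")     --> "luaotfload-init.lua"
+---     make_loader_name ("fontloader", "font-otf") --> "fontloader-font-otf.lua"
+---     make_loader_name (false, "font-age")        --> "font-age.lua"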
---[[doc--
-
- \subsection{Preparing the Font Loader}
- We treat the fontloader as a black box so behavior is consistent
- between formats.
- We load the fontloader code directly in the same fashion as the
- Plain format \identifier{luatex-fonts} that is part of Context.
- How this is executed depends on the presence on the
- \emphasis{merged font loader code}.
- In \identifier{luaotfload} this is contained in the file
- \fileent{luaotfload-merged.lua}.
- If this file cannot be found, the original libraries from \CONTEXT
- of which the merged code was composed are loaded instead.
- Since these files are not shipped with Luaotfload, an installation
- of Context is required.
- (Since we pull the fontloader directly from the Context minimals,
- the necessary Context version is likely to be more recent than that
- of other TeX distributions like Texlive.)
- The imported font loader will call \luafunction{callback.register}
- once while reading \fileent{font-def.lua}.
- This is unavoidable unless we modify the imported files, but
- harmless if we make it call a dummy instead.
- However, this problem might vanish if we decide to do the merging
- ourselves, like the \identifier{lualibs} package does.
- With this step we would obtain the freedom to load our own
- overrides in the process right where they are needed, at the cost
- of losing encapsulation.
- The decision on how to progress is currently on indefinite hold.
-
---doc]]--
-
-local starttime = os.gettimeofday ()
-local trapped_register = callback.register
-callback.register = dummy_function
-
---[[doc--
-
- By default, the fontloader requires a number of \emphasis{private
- attributes} for internal use.
- These must be kept consistent with the attribute handling methods
- as provided by \identifier{luatexbase}.
- Our strategy is to override the function that allocates new
- attributes before we initialize the font loader, making it a
- wrapper around \luafunction{luatexbase.new_attribute}.\footnote{%
- Many thanks, again, to Hans Hagen for making this part
- configurable!
- }
- The attribute identifiers are prefixed “\fileent{luaotfload@}” to
- avoid name clashes.
-
---doc]]--
-
-do
- local new_attribute = luatexbase.new_attribute
- local the_attributes = luatexbase.attributes
-
- attributes = attributes or { }
+local timing_info = {
+ t_load = { },
+ t_init = { },
+}
- attributes.private = function (name)
- local attr = "luaotfload@" .. name --- used to be: “otfl@”
- local number = the_attributes[attr]
- if not number then
- number = new_attribute(attr)
- end
- return number
+local make_loader = function (prefix)
+ return function (name)
+ local t_0 = osgettimeofday ()
+ local modname = make_loader_name (prefix, name)
+ local data = require (modname)
+ local t_end = osgettimeofday ()
+ timing_info.t_load [name] = t_end - t_0
+ return data
end
end
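+
+--- For instance, ``make_loader "luaotfload"`` yields the module loader used
+--- below: ``loadmodule "init"`` then requires “luaotfload-init.lua” and
+--- records the elapsed time in ``timing_info.t_load ["init"]``.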
--[[doc--
-
- These next lines replicate the behavior of
- \fileent{luatex-fonts.lua}.
-
+ Certain files are kept around that aren’t loaded because they are part of
+ the imported fontloader. In order to keep the initialization structure
+ intact we also provide a no-op version of the module loader that can be
+ called in the expected places.
--doc]]--
-local context_environment = { }
-
-local push_namespaces = function ()
- logreport ("log", 1, "main", "push namespace for font loader")
- local normalglobal = { }
- for k, v in next, _G do
- normalglobal[k] = v
- end
- return normalglobal
+local dummy_loader = function (name)
+ luaotfload.log.report ("log", 3, "load",
+ "Skipping module “%s” on purpose.",
+ name)
end
-local pop_namespaces = function (normalglobal, isolate)
- if normalglobal then
- local _G = _G
- local mode = "non-destructive"
- if isolate then mode = "destructive" end
- logreport ("log", 1, "main", "pop namespace from font loader -- " .. mode)
- for k, v in next, _G do
- if not normalglobal[k] then
- context_environment[k] = v
- if isolate then
- _G[k] = nil
- end
- end
- end
- for k, v in next, normalglobal do
- _G[k] = v
+local context_loader = function (name, path)
+ luaotfload.log.report ("log", 3, "load",
+ "Loading module “%s” from Context.",
+ name)
+ local t_0 = osgettimeofday ()
+ local modname = make_loader_name (false, name)
+ local modpath = modname
+ if path then
+ if lfs.isdir (path) then
+ luaotfload.log.report ("log", 3, "load",
+ "Prepending path “%s”.",
+ path)
+ modpath = file.join (path, modname)
+ else
+ luaotfload.log.report ("both", 0, "load",
+                             "Non-existent path “%s” specified, ignoring.",
+ path)
end
- -- just to be sure:
- setmetatable(context_environment,_G)
- else
- logreport ("both", 0, "main",
- "irrecoverable error during pop_namespace: no globals to restore")
- os.exit()
end
-end
-
-luaotfload.context_environment = context_environment
-luaotfload.push_namespaces = push_namespaces
-luaotfload.pop_namespaces = pop_namespaces
-
-local our_environment = push_namespaces()
-
---[[doc--
-
- The font loader requires that the attribute with index zero be
- zero. We happily oblige.
- (Cf. \fileent{luatex-fonts-nod.lua}.)
-
---doc]]--
-
-tex.attribute[0] = 0
-
---[[doc--
-
- Now that things are sorted out we can finally load the fontloader.
-
---doc]]--
-
-loadmodule "fontloader.lua"
----loadmodule"font-odv.lua" --- <= Devanagari support from Context
-
-if fonts then
-
- if not fonts._merge_loaded_message_done_ then
- logreport ("log", 5, "main", [["I am using the merged fontloader here.]])
- logreport ("log", 5, "main", [[ If you run into problems or experience unexpected]])
- logreport ("log", 5, "main", [[ behaviour, and if you have ConTeXt installed you can try]])
- logreport ("log", 5, "main", [[ to delete the file 'luaotfload-fontloader.lua' as I might]])
- logreport ("log", 5, "main", [[ then use the possibly updated libraries. The merged]])
- logreport ("log", 5, "main", [[ version is not supported as it is a frozen instance.]])
- logreport ("log", 5, "main", [[ Problems can be reported to the ConTeXt mailing list."]])
+ local ret = require (modpath)
+ local t_end = osgettimeofday ()
+ timing_info.t_load [name] = t_end - t_0
+
+ if ret ~= true then
+ --- require () returns “true” upon success unless the loaded file
+ --- yields a non-zero exit code. This isn’t per se indicating that
+    --- something isn’t right, but it goes against HH’s coding practices. We’ll
+ --- silently ignore this ever happening on lower log levels.
+ luaotfload.log.report ("log", 4, "load",
+                           "Module “%s” returned “%s”.", name, ret)
end
- fonts._merge_loaded_message_done_ = true
-
-else--- the loading sequence is known to change, so this might have to
- --- be updated with future updates!
- --- do not modify it though unless there is a change to the merged
- --- package!
- loadmodule("l-lua.lua")
- loadmodule("l-lpeg.lua")
- loadmodule("l-function.lua")
- loadmodule("l-string.lua")
- loadmodule("l-table.lua")
- loadmodule("l-io.lua")
- loadmodule("l-file.lua")
- loadmodule("l-boolean.lua")
- loadmodule("l-math.lua")
- loadmodule("util-str.lua")
- loadmodule('luatex-basics-gen.lua')
- loadmodule('data-con.lua')
- loadmodule('luatex-basics-nod.lua')
- loadmodule('font-ini.lua')
- loadmodule('font-con.lua')
- loadmodule('luatex-fonts-enc.lua')
- loadmodule('font-cid.lua')
- loadmodule('font-map.lua')
- loadmodule('luatex-fonts-syn.lua')
- loadmodule('luatex-fonts-tfm.lua')
- loadmodule('font-oti.lua')
- loadmodule('font-otf.lua')
- loadmodule('font-otb.lua')
- loadmodule('luatex-fonts-inj.lua') --> since 2014-01-07, replaces node-inj.lua
- loadmodule('font-ota.lua')
- loadmodule('luatex-fonts-otn.lua') --> since 2014-01-07, replaces font-otn.lua
- loadmodule('font-otp.lua') --> since 2013-04-23
- loadmodule('luatex-fonts-lua.lua')
- loadmodule('font-def.lua')
- loadmodule('luatex-fonts-def.lua')
- loadmodule('luatex-fonts-ext.lua')
- loadmodule('luatex-fonts-cbk.lua')
-end --- non-merge fallback scope
-
---[[doc--
-
- Here we adjust the globals created during font loader
- initialization. If the second argument to
- \luafunction{pop_namespaces()} is \verb|true| this will restore the
- state of \luafunction{_G}, eliminating every global generated since
- the last call to \luafunction{push_namespaces()}. At the moment we
- see no reason to do this, and since the font loader is considered
- an essential part of \identifier{luatex} as well as a very well
- organized piece of code, we happily concede it the right to add to
- \luafunction{_G} if needed.
-
---doc]]--
-
-pop_namespaces(our_environment, false)-- true)
-
-logreport ("both", 0, "main",
- "fontloader loaded in %0.3f seconds", os.gettimeofday()-starttime)
-
---[[doc--
-
- \subsection{Callbacks}
- After the fontloader is ready we can restore the callback trap from
- \identifier{luatexbase}.
-
---doc]]--
-
-callback.register = trapped_register
-
---[[doc--
-
- We do our own callback handling with the means provided by
- luatexbase.
- Note: \luafunction{pre_linebreak_filter} and
- \luafunction{hpack_filter} are coupled in \CONTEXT in the concept
- of \emphasis{node processor}.
-
---doc]]--
-
-add_to_callback("pre_linebreak_filter",
- nodes.simple_font_handler,
- "luaotfload.node_processor",
- 1)
-add_to_callback("hpack_filter",
- nodes.simple_font_handler,
- "luaotfload.node_processor",
- 1)
-add_to_callback("find_vf_file",
- find_vf_file, "luaotfload.find_vf_file")
-
-loadmodule "override.lua" --- load glyphlist on demand
-
---[[doc--
-
- Now we load the modules written for \identifier{luaotfload}.
-
---doc]]--
-
-loadmodule "parsers.lua" --- fonts.conf and syntax
-loadmodule "configuration.lua" --- configuration options
-
-if not config.actions.apply_defaults () then
- logreport ("log", 0, "load", "Configuration unsuccessful.")
+ return ret
end
-loadmodule "loaders.lua" --- Type1 font wrappers
-loadmodule "database.lua" --- Font management.
-loadmodule "colors.lua" --- Per-font colors.
-
-if not config.actions.reconfigure () then
- logreport ("log", 0, "load", "Post-configuration hooks failed.")
-end
-
---[[doc--
-
- Relying on the \verb|name:| resolver for everything has been the
- source of permanent trouble with the database.
- With the introduction of the new syntax parser we now have enough
- granularity to distinguish between the \XETEX emulation layer and
- the genuine \verb|name:| and \verb|file:| lookups of \LUATEX-Fonts.
- Another benefit is that we can now easily plug in or replace new
- lookup behaviors if necessary.
- The name resolver remains untouched, but it calls
- \luafunction{fonts.names.resolve()} internally anyways (see
- \fileent{luaotfload-database.lua}).
-
---doc]]--
-
-local filesuffix = file.suffix
-local fileremovesuffix = file.removesuffix
-local request_resolvers = fonts.definers.resolvers
-local formats = fonts.formats
-local names = fonts.names
-formats.ofm = "type1"
-
-fonts.encodings.known = fonts.encodings.known or { }
-
---[[doc--
-
- \identifier{luaotfload} promises easy access to system fonts.
- Without additional precautions, this cannot be achieved by
- \identifier{kpathsea} alone, because it searches only the
- \fileent{texmf} directories by default.
- Although it is possible for \identifier{kpathsea} to include extra
- paths by adding them to the \verb|OSFONTDIR| environment variable,
- this is still short of the goal »\emphasis{it just works!}«.
- When building the font database \identifier{luaotfload} scans
- system font directories anyways, so we already have all the
- information for looking sytem fonts.
- With the release version 2.2 the file names are indexed in the
- database as well and we are ready to resolve \verb|file:| lookups
- this way.
- Thus we no longer need to call the \identifier{kpathsea} library in
- most cases when looking up font files, only when generating the
- database, and when verifying the existence of a file in the
- \fileent{texmf} tree.
-
---doc]]--
-
-local resolve_file = names.font_file_lookup
-
-local file_resolver = function (specification)
- local name = resolve_file (specification.name)
- local suffix = filesuffix(name)
- if formats[suffix] then
- specification.forced = stringlower (suffix)
- specification.forcedname = file.removesuffix(name)
- else
- specification.name = name
- end
-end
-
-request_resolvers.file = file_resolver
-
---[[doc--
-
- We classify as \verb|anon:| those requests that have neither a
- prefix nor brackets. According to Khaled\footnote{%
- \url{https://github.com/phi-gamma/luaotfload/issues/4#issuecomment-17090553}.
- }
- they are the \XETEX equivalent of a \verb|name:| request, so we
- will be treating them as such.
-
---doc]]--
-
---request_resolvers.anon = request_resolvers.name
-
---[[doc--
-
- There is one drawback, though.
- This syntax is also used for requesting fonts in \identifier{Type1}
- (\abbrev{tfm}, \abbrev{ofm}) format.
- These are essentially \verb|file:| lookups and must be caught
- before the \verb|name:| resolver kicks in, lest they cause the
- database to update.
- Even if we were to require the \verb|file:| prefix for all
- \identifier{Type1} requests, tests have shown that certain fonts
- still include further fonts (e.~g. \fileent{omlgcb.ofm} will ask
- for \fileent{omsecob.tfm}) \emphasis{using the old syntax}.
- For this reason, we introduce an extra check with an early return.
-
---doc]]--
-
-local type1_formats = { "tfm", "ofm", "TFM", "OFM", }
-
-request_resolvers.anon = function (specification)
- local name = specification.name
- for i=1, #type1_formats do
- local format = type1_formats[i]
- if resolvers.findfile(name, format) then
- specification.forcedname = file.addsuffix(name, format)
- specification.forced = format
- return
+local install_loaders = function ()
+ local loaders = { }
+ local loadmodule = make_loader "luaotfload"
+ loaders.luaotfload = loadmodule
+ loaders.fontloader = make_loader "fontloader"
+ loaders.context = context_loader
+ loaders.ignore = dummy_loader
+----loaders.plaintex = make_loader "luatex" --=> for Luatex-Plain
+
+ loaders.initialize = function (name)
+ local tmp = loadmodule (name)
+ local logreport = luaotfload.log.report
+ if type (tmp) == "table" then
+ local init = tmp.init
+ if init and type (init) == "function" then
+ local t_0 = osgettimeofday ()
+ if not init () then
+ logreport ("log", 0, "load",
+ "Failed to load module “%s”.", name)
+ return
+ end
+ local t_end = osgettimeofday ()
+ local d_t = t_end - t_0
+ logreport ("log", 4, "load",
+                   "Module “%s” loaded in %0.3f seconds.",
+ name, d_t)
+ timing_info.t_init [name] = d_t
+ end
end
end
- --- under some weird circumstances absolute paths get
- --- passed to the definer; we have to catch them
- --- before the name: resolver misinterprets them.
- name = specification.specification
- local exists, _ = lfsisfile(name)
- if exists then --- garbage; we do this because we are nice,
- --- not because it is correct
- logreport ("log", 1, "load", "file %q exists", name)
- logreport ("log", 1, "load",
- "... overriding borked anon: lookup with path: lookup")
- specification.name = name
- request_resolvers.path(specification)
- return
- end
- request_resolvers.name(specification)
-end
-
---[[doc--
-
- Prior to version 2.2, \identifier{luaotfload} did not distinguish
- \verb|file:| and \verb|path:| lookups, causing complications with
- the resolver.
- Now we test if the requested name is an absolute path in the file
- system, otherwise we fall back to the \verb|file:| lookup.
-
---doc]]--
-request_resolvers.path = function (specification)
- local name = specification.name
- local exists, _ = lfsisfile(name)
- if not exists then -- resort to file: lookup
- logreport ("log", 0, "load",
- "path lookup of %q unsuccessful, falling back to file:",
- name)
- file_resolver (specification)
- else
- local suffix = filesuffix (name)
- if formats[suffix] then
- specification.forced = stringlower (suffix)
- specification.name = file.removesuffix(name)
- specification.forcedname = name
- else
- specification.name = name
- end
- end
+ return loaders
end
---[[doc--
-
- {\bfseries EXPERIMENTAL}:
- \identifier{kpse}-only resolver, for those who can do without
- system fonts.
+luaotfload.main = function ()
---doc]]--
+ luaotfload.loaders = install_loaders ()
+ local loaders = luaotfload.loaders
+ local loadmodule = loaders.luaotfload
+ local initialize = loaders.initialize
-request_resolvers.kpse = function (specification)
- local name = specification.name
- local suffix = filesuffix(name)
- if suffix and formats[suffix] then
- name = file.removesuffix(name)
- if resolvers.findfile(name, suffix) then
- specification.forced = stringlower (suffix)
- specification.forcedname = name
- return
- end
- end
- for t, format in next, formats do --- brute force
- if kpse.find_file (name, format) then
- specification.forced = t
- specification.name = name
- return
- end
- end
-end
+ local starttime = osgettimeofday ()
+ local init = loadmodule "init" --- fontloader initialization
+ local store = init.early () --- injects the log module too
+ local logreport = luaotfload.log.report
---[[doc--
+ initialize "parsers" --- fonts.conf and syntax
+ initialize "configuration" --- configuration options
- The \verb|name:| resolver.
+ if not init.main (store) then
+ logreport ("log", 0, "load", "Main fontloader initialization failed.")
+ end
---doc]]--
+ initialize "loaders" --- Font loading; callbacks
+ initialize "database" --- Font management.
+ initialize "colors" --- Per-font colors.
---- fonts.names.resolvers.name -- Customized version of the
---- generic name resolver.
+ luaotfload.resolvers = loadmodule "resolvers" --- Font lookup
+ luaotfload.resolvers.init ()
-request_resolvers.name = function (specification)
- local resolver = names.resolve_cached
- if config.luaotfload.run.resolver == "normal" then
- resolver = names.resolve_name
- end
- local resolved, subfont = resolver (specification)
- if resolved then
- logreport ("log", 0, "load", "Lookup/name: %q -> \"%s%s\"",
- specification.name,
- resolved,
- subfont and stringformat ("(%d)", subfont) or "")
- specification.resolved = resolved
- specification.sub = subfont
- specification.forced = stringlower (filesuffix (resolved) or "")
- specification.forcedname = resolved
- specification.name = fileremovesuffix (resolved)
- else
- file_resolver (specification)
+ if not config.actions.reconfigure () then
+ logreport ("log", 0, "load", "Post-configuration hooks failed.")
end
-end
-
---[[doc--
- Also {\bfseries EXPERIMENTAL}: custom file resolvers via callback.
+ initialize "features" --- font request and feature handling
+ loadmodule "letterspace" --- extra character kerning
+ loadmodule "auxiliary" --- additional high-level functionality
---doc]]--
-create_callback("luaotfload.resolve_font", "simple", dummy_function)
+ luaotfload.aux.start_rewrite_fontname () --- to be migrated to fontspec
-request_resolvers.my = function (specification)
- call_callback("luaotfload.resolve_font", specification)
+ logreport ("both", 0, "main",
+ "initialization completed in %0.3f seconds",
+ osgettimeofday() - starttime)
+----inspect (timing_info)
end
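+
+--- Note: ``luaotfload.main ()`` is the single entry point into the package;
+--- it is expected to be called exactly once from the TeX side (presumably
+--- along the lines of ``\directlua{require "luaotfload-main"
+--- luaotfload.main ()}``), after which all further work is delegated to the
+--- submodules loaded above.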
---[[doc--
-
- We create a callback for patching fonts on the fly, to be used by
- other packages.
- It initially contains the empty function that we are going to
- override below.
-
---doc]]--
-
-create_callback("luaotfload.patch_font", "simple", dummy_function)
-
---[[doc--
-
- \subsection{\CONTEXT override}
- \label{define-font}
- We provide a simplified version of the original font definition
- callback.
-
---doc]]--
-
-local read_font_file = fonts.definers.read
-
-local definers = {
- generic = read_font_file,
- --- spec -> size -> id -> tmfdata
- patch = function (specification, size, id)
- local tfmdata = read_font_file (specification, size, id)
- if type (tfmdata) == "table" and tfmdata.shared then
- --- We need to test for the “shared” field here
- --- or else the fontspec capheight callback will
- --- operate on tfm fonts.
- call_callback ("luaotfload.patch_font", tfmdata, specification)
- end
- return tfmdata
- end,
-}
-
-reset_callback "define_font"
-
---[[doc--
-
- Finally we register the callbacks.
-
---doc]]--
-
-local definer = config.luaotfload.run.definer
-add_to_callback ("define_font", definers[definer], "luaotfload.define_font", 1)
-
-loadmodule "features.lua" --- font request and feature handling
-loadmodule "letterspace.lua" --- extra character kerning
-loadmodule "auxiliary.lua" --- additional high-level functionality
-
-luaotfload.aux.start_rewrite_fontname () --- to be migrated to fontspec
-
-- vim:tw=79:sw=4:ts=4:et
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua
deleted file mode 100644
index b75530b5cdf..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua
+++ /dev/null
@@ -1,52 +0,0 @@
-if not modules then modules = { } end modules ["luaotfload-override"] = {
- version = "2.5",
- comment = "companion to Luaotfload",
- author = "Khaled Hosny, Elie Roux, Philipp Gesang",
- copyright = "Luaotfload Development Team",
- license = "GNU GPL v2.0"
-}
-
-local findfile = resolvers.findfile
-local encodings = fonts.encodings
-
-local log = luaotfload.log
-local report = log.report
-
---[[doc--
-
- Adobe Glyph List.
- -------------------------------------------------------------------
-
- Context provides a somewhat different font-age.lua from an unclear
- origin. Unfortunately, the file name it reads from is hard-coded
- in font-enc.lua, so we have to replace the entire table.
-
- This shouldn’t cause any complications. Due to its implementation
- the glyph list will be loaded upon loading a OTF or TTF for the
- first time during a TeX run. (If one sticks to TFM/OFM then it is
- never read at all.) For this reason we can install a metatable that
- looks up the file of our choosing and only falls back to the
- Context one in case it cannot be found.
-
---doc]]--
-
-encodings.agl = { }
-
-setmetatable(fonts.encodings.agl, { __index = function (t, k)
- if k ~= "unicodes" then
- return nil
- end
- local glyphlist = findfile "luaotfload-glyphlist.lua"
- if glyphlist then
- report ("log", 1, "load", "loading the Adobe glyph list")
- else
- glyphlist = findfile "font-age.lua"
- report ("both", 0, "load",
- "loading the extended glyph list from ConTeXt")
- end
- local unicodes = dofile(glyphlist)
- encodings.agl = { unicodes = unicodes }
- return unicodes
-end })
-
--- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-package.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-package.lua
new file mode 100644
index 00000000000..524dbcb05e8
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-package.lua
@@ -0,0 +1,99 @@
+--
+-----------------------------------------------------------------------
+-- FILE: luaotfload-package.lua
+-- DESCRIPTION: Luatex fontloader packaging
+-- REQUIREMENTS: luatex, mtx-package
+-- AUTHOR: Philipp Gesang
+-- LICENSE: GNU GPL v2.0
+-----------------------------------------------------------------------
+--
+
+--- The original initialization sequence by Hans Hagen, see the file
+--- luatex-fonts.lua for details:
+---
+--- [01] l-lua.lua
+--- [02] l-lpeg.lua
+--- [03] l-function.lua
+--- [04] l-string.lua
+--- [05] l-table.lua
+--- [06] l-io.lua
+--- [07] l-file.lua
+--- [08] l-boolean.lua
+--- [09] l-math.lua
+--- [10] util-str.lua
+--- [11] luatex-basics-gen.lua
+--- [12] data-con.lua
+--- [13] luatex-basics-nod.lua
+--- [14] font-ini.lua
+--- [15] font-con.lua
+--- [16] luatex-fonts-enc.lua
+--- [17] font-cid.lua
+--- [18] font-map.lua
+--- [19] luatex-fonts-syn.lua
+--- [20] font-tfm.lua
+--- [21] font-afm.lua
+--- [22] font-afk.lua
+--- [23] luatex-fonts-tfm.lua
+--- [24] font-oti.lua
+--- [25] font-otf.lua
+--- [26] font-otb.lua
+--- [27] luatex-fonts-inj.lua
+--- [28] luatex-fonts-ota.lua
+--- [29] luatex-fonts-otn.lua
+--- [30] font-otp.lua
+--- [31] luatex-fonts-lua.lua
+--- [32] font-def.lua
+--- [33] luatex-fonts-def.lua
+--- [34] luatex-fonts-ext.lua
+--- [35] luatex-fonts-cbk.lua
+---
+--- Of these, nos. 01--10 are provided by the Lualibs. Keeping them
+--- around in the Luaotfload fontloader is therefore unnecessary.
+--- Packaging needs to account for this difference.
+
+loadmodule "l-lua.lua"
+loadmodule "l-lpeg.lua"
+loadmodule "l-function.lua"
+loadmodule "l-string.lua"
+loadmodule "l-table.lua"
+loadmodule "l-io.lua"
+loadmodule "l-file.lua"
+loadmodule "l-boolean.lua"
+loadmodule "l-math.lua"
+loadmodule "util-str.lua"
+
+--- Another file containing auxiliary definitions must be present
+--- prior to initialization of the configuration.
+
+loadmodule "luatex-basics-gen.lua"
+
+--- The files below constitute the “fontloader proper”. Some of the
+--- functionality, such as the file resolvers, is later overloaded by
+--- Luaotfload. Consequently, the resulting package is fairly
+--- bare-bones and not usable on its own.
+
+loadmodule("data-con.lua")
+loadmodule("luatex-basics-nod.lua")
+loadmodule("font-ini.lua")
+loadmodule("font-con.lua")
+loadmodule("luatex-fonts-enc.lua")
+loadmodule("font-cid.lua")
+loadmodule("font-map.lua")
+loadmodule("luatex-fonts-syn.lua")
+loadmodule("font-tfm.lua")
+loadmodule("font-afm.lua")
+loadmodule("font-afk.lua")
+loadmodule("luatex-fonts-tfm.lua")
+loadmodule("font-oti.lua")
+loadmodule("font-otf.lua")
+loadmodule("font-otb.lua")
+loadmodule("luatex-fonts-inj.lua")
+loadmodule("luatex-fonts-ota.lua")
+loadmodule("luatex-fonts-otn.lua")
+loadmodule("font-otp.lua")
+loadmodule("luatex-fonts-lua.lua")
+loadmodule("font-def.lua")
+loadmodule("luatex-fonts-def.lua")
+loadmodule("luatex-fonts-ext.lua")
+loadmodule("luatex-fonts-cbk.lua")
+
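The module list above presupposes a loadmodule helper that is not defined in
this file; presumably it is supplied by the packaging machinery (mtx-package).
As a rough, hypothetical sketch only, a texlua implementation that resolves
each name through kpathsea might look like this (the function name and lookup
strategy are assumptions, not the actual implementation):

    -- hypothetical loadmodule helper; the real one comes from the
    -- packaging tooling and may differ
    kpse.set_program_name "luatex"     -- texlua exposes kpse as a global

    local function loadmodule (name)
      local path = kpse.find_file (name, "lua")
      if not path then
        error (("loadmodule: file %q not found"):format (name))
      end
      dofile (path)                    -- run the chunk in the global environment
    end
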
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-parsers.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-parsers.lua
index 3a4faea9b56..0349cdc7738 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-parsers.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-parsers.lua
@@ -2,24 +2,12 @@
-------------------------------------------------------------------------------
-- FILE: luaotfload-parsers.lua
-- DESCRIPTION: various lpeg-based parsers used in Luaotfload
--- REQUIREMENTS: Luaotfload > 2.4
--- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
--- VERSION: same as Luaotfload
--- CREATED: 2014-01-14 10:15:20+0100
+-- REQUIREMENTS: Luaotfload >= 2.6
+-- AUTHOR: Philipp Gesang (Phg), <phg@phi-gamma.net>
-------------------------------------------------------------------------------
--
-if not modules then modules = { } end modules ['luaotfload-parsers'] = {
- version = "2.5",
- comment = "companion to luaotfload-main.lua",
- author = "Philipp Gesang",
- copyright = "Luaotfload Development Team",
- license = "GNU GPL v2.0"
-}
-
-luaotfload = luaotfload or { }
-luaotfload.parsers = luaotfload.parsers or { }
-local parsers = luaotfload.parsers
+local traversal_maxdepth = 42 --- prevent stack overflows
local rawset = rawset
@@ -40,8 +28,7 @@ local filedirname = file.dirname
local io = io
local ioopen = io.open
-local log = luaotfload.log
-local logreport = log.report
+local logreport = print
local string = string
local stringsub = string.sub
@@ -190,6 +177,7 @@ local p_cheapxml = header * root
--doc]]--
--- string -> path list
local fonts_conf_scanner = function (path)
+ logreport("both", 5, "db", "Read fontconfig file %s.", path)
local fh = ioopen(path, "r")
if not fh then
logreport("both", 3, "db", "Cannot open fontconfig file %s.", path)
@@ -197,12 +185,17 @@ local fonts_conf_scanner = function (path)
end
local raw = fh:read"*all"
fh:close()
+ logreport("both", 7, "db",
+ "Reading fontconfig file %s completed (%d bytes).",
+ path, #raw)
+ logreport("both", 5, "db", "Scan fontconfig file %s.", path)
local confdata = lpegmatch(p_cheapxml, raw)
if not confdata then
logreport("both", 3, "db", "Cannot scan fontconfig file %s.", path)
return
end
+ logreport("both", 7, "db", "Scan of fontconfig file %s completed.", path)
return confdata
end
@@ -220,8 +213,9 @@ end
read_fonts_conf_indeed() -- Scan paths included from fontconfig
 configuration files recursively. Called with nine arguments.
- The first four are
+ The first five are
+ · the current recursion depth
· the path to the file
· the expanded $HOME
· the expanded $XDG_CONFIG_HOME
@@ -234,12 +228,13 @@ end
--doc]]--
---- string -> string -> string -> string
+--- size_t -> string -> string -> string -> string
--- -> string list -> string list -> string list
--- -> (string -> fun option -> string list)
--- -> tab * tab * tab
local read_fonts_conf_indeed
-read_fonts_conf_indeed = function (start,
+read_fonts_conf_indeed = function (depth,
+ start,
home,
xdg_config_home,
xdg_data_home,
@@ -248,6 +243,18 @@ read_fonts_conf_indeed = function (start,
dirs_done,
find_files)
+ logreport ("both", 4, "db",
+ "Fontconfig scanner processing path %s.",
+ start)
+ if depth >= traversal_maxdepth then
+ --- prevent overflow of Lua call stack
+ logreport ("both", 0, "db",
+ "Fontconfig scanner hit recursion limit (%d); "
+ .. "aborting directory traversal.",
+ traversal_maxdepth)
+ return acc, done, dirs_done
+ end
+
local paths = fonts_conf_scanner(start)
if not paths then --- nothing to do
return acc, done, dirs_done
@@ -273,6 +280,9 @@ read_fonts_conf_indeed = function (start,
--- distributions (e.g. Context minimals) installed
--- separately?
if not (stringfind(path, "texmf") or dirs_done[path]) then
+ logreport ("log", 5, "db",
+ "New fontconfig path at %s.",
+ path)
acc[#acc+1] = path
dirs_done[path] = true
end
@@ -292,34 +302,45 @@ read_fonts_conf_indeed = function (start,
end
if lfsisfile(path)
and kpsereadable_file(path)
- and not done[path]
then
- --- we exclude path with texmf in them, as they should
- --- be found otherwise
- acc = read_fonts_conf_indeed(path,
- home,
- xdg_config_home,
- xdg_data_home,
- acc,
- done,
- dirs_done,
- find_files)
+ if done[path] then
+ logreport("log", 3, "db",
+ "Skipping file at %s, already included.", path)
+ else
+ done[path] = true
+ acc = read_fonts_conf_indeed(depth + 1,
+ path,
+ home,
+ xdg_config_home,
+ xdg_data_home,
+ acc,
+ done,
+ dirs_done,
+ find_files)
+ end
elseif lfsisdir(path) then --- arrow code ahead
local config_files = find_files (path, conf_filter)
for _, filename in next, config_files do
if not done[filename] then
- acc = read_fonts_conf_indeed(filename,
- home,
- xdg_config_home,
- xdg_data_home,
- acc,
- done,
- dirs_done,
- find_files)
+ if done[path] then
+ logreport ("log", 3, "db",
+ "Skipping file at %s, already included.", path)
+ else
+ done[path] = true
+ acc = read_fonts_conf_indeed(depth + 1,
+ filename,
+ home,
+ xdg_config_home,
+ xdg_data_home,
+ acc,
+ done,
+ dirs_done,
+ find_files)
+ end
end
end
end --- match “kind”
- end --- iterate paths
+ end --- iterate paths
end
--inspect(acc)
@@ -350,7 +371,8 @@ local read_fonts_conf = function (path_list, find_files)
local done = { } ---> set: files inspected
local dirs_done = { } ---> set: dirs in list
for i=1, #path_list do --- we keep the state between files
- acc, done, dirs_done = read_fonts_conf_indeed(path_list[i],
+ acc, done, dirs_done = read_fonts_conf_indeed(0,
+ path_list[i],
home,
xdg_config_home,
xdg_data_home,
@@ -362,10 +384,6 @@ local read_fonts_conf = function (path_list, find_files)
return acc
end
-luaotfload.parsers.read_fonts_conf = read_fonts_conf
-
-
-
-------------------------------------------------------------------------------
--- MISC PARSERS
-------------------------------------------------------------------------------
@@ -373,10 +391,8 @@ luaotfload.parsers.read_fonts_conf = read_fonts_conf
local trailingslashes = slash^1 * P(-1)
local stripslashes = C((1 - trailingslashes)^0)
-parsers.stripslashes = stripslashes
local splitcomma = Ct((C(noncomma^1) + comma)^1)
-parsers.splitcomma = splitcomma
@@ -616,8 +632,6 @@ local font_request = Ct(path_lookup * (colon^-1 * features)^-1
--- v2.5 parser: 1065 rules
--- v1.2 parser: 230 rules
-luaotfload.parsers.font_request = font_request
-
-------------------------------------------------------------------------------
--- INI FILES
-------------------------------------------------------------------------------
@@ -694,7 +708,7 @@ local ini_variables = Cg (Cf (Ct "" * ini_variable^0, rawset), "variables")
local ini_section = Ct (ini_heading * ini_variables)
local ini_sections = skip_line^0 * ini_section^0
-local config = Ct (ini_sections)
+local parse_config = Ct (ini_sections)
--[=[doc--
@@ -726,6 +740,22 @@ local config = Ct (ini_sections)
--doc]=]--
-luaotfload.parsers.config = config
+return {
+ init = function ()
+ logreport = luaotfload.log.report
+ luaotfload.parsers = {
+ --- parameters
+ traversal_maxdepth = traversal_maxdepth,
+ --- main parsers
+ read_fonts_conf = read_fonts_conf,
+ font_request = font_request,
+ config = parse_config,
+ --- common patterns
+ stripslashes = stripslashes,
+ splitcomma = splitcomma,
+ }
+ return true
+ end
+}
-- vim:ft=lua:tw=71:et:sw=2:sts=4:ts=8
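With this change luaotfload-parsers.lua no longer populates the
luaotfload.parsers namespace at load time; instead it returns a table whose
init() function installs the parsers once logging is available. A hedged
sketch of how a caller might drive the new convention (the actual driver
lives in luaotfload-main.lua, which is not part of this hunk):

    -- assumes luaotfload.log has already been set up, since init()
    -- wires its logreport to luaotfload.log.report
    local parsers = require "luaotfload-parsers"   -- returns { init = ... }
    assert (parsers.init (), "luaotfload-parsers failed to initialize")

    -- after init() the lpeg patterns are reachable via the namespace
    local splitcomma = luaotfload.parsers.splitcomma
    local fields     = lpeg.match (splitcomma, "kern=on,liga=off,mode=node")
    -- fields == { "kern=on", "liga=off", "mode=node" }
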
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-resolvers.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-resolvers.lua
new file mode 100644
index 00000000000..3d7f6b0e88d
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-resolvers.lua
@@ -0,0 +1,254 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-resolvers.lua
+-- USAGE: ./luaotfload-resolvers.lua
+-- DESCRIPTION: Resolvers for hooking into the fontloader
+-- REQUIREMENTS: Luaotfload and a decent bit of courage
+-- AUTHOR: Philipp Gesang (Phg), <phg@phi-gamma.net>
+-----------------------------------------------------------------------
+--
+--- The bare fontloader uses a set of simplistic file name resolvers
+--- that must be overloaded by the user (i. e. us).
+
+if not lualibs then error "this module requires Luaotfload" end
+if not luaotfload then error "this module requires Luaotfload" end
+
+--[[doc--
+
+ Relying on the \verb|name:| resolver for everything has been the
+ source of permanent trouble with the database.
+ With the introduction of the new syntax parser we now have enough
+ granularity to distinguish between the \XETEX emulation layer and
+ the genuine \verb|name:| and \verb|file:| lookups of \LUATEX-Fonts.
+ Another benefit is that we can now easily plug in new lookup
+ behaviors or replace existing ones if necessary.
+ The name resolver remains untouched, but it calls
+ \luafunction{fonts.names.resolve()} internally anyway (see
+ \fileent{luaotfload-database.lua}).
+
+--doc]]--
+
+local next = next
+local kpsefind_file = kpse.find_file
+local lfsisfile = lfs.isfile
+local stringlower = string.lower
+local stringformat = string.format
+local filesuffix = file.suffix
+local fileremovesuffix = file.removesuffix
+local luatexbase = luatexbase
+local logreport = luaotfload.log.report
+
+--[[doc--
+
+ \identifier{luaotfload} promises easy access to system fonts.
+ Without additional precautions, this cannot be achieved by
+ \identifier{kpathsea} alone, because it searches only the
+ \fileent{texmf} directories by default.
+ Although it is possible for \identifier{kpathsea} to include extra
+ paths by adding them to the \verb|OSFONTDIR| environment variable,
+ this is still short of the goal »\emphasis{it just works!}«.
+ When building the font database \identifier{luaotfload} scans
+ system font directories anyway, so we already have all the
+ information needed for looking up system fonts.
+ With the release version 2.2 the file names are indexed in the
+ database as well and we are ready to resolve \verb|file:| lookups
+ this way.
+ Thus we no longer need to call the \identifier{kpathsea} library in
+ most cases when looking up font files, only when generating the
+ database, and when verifying the existence of a file in the
+ \fileent{texmf} tree.
+
+--doc]]--
+
+local resolve_file
+resolve_file = function (specification)
+ local name = fonts.names.lookup_font_file (specification.name)
+ local suffix = filesuffix (name)
+ if fonts.formats[suffix] then
+ specification.forced = stringlower (suffix)
+ specification.forcedname = fileremovesuffix(name)
+ else
+ specification.name = name
+ end
+end
+
+--[[doc--
+
+ Prior to version 2.2, \identifier{luaotfload} did not distinguish
+ \verb|file:| and \verb|path:| lookups, causing complications with
+ the resolver.
+ Now we test whether the requested name is an absolute path in the
+ file system; otherwise we fall back to the \verb|file:| lookup.
+
+--doc]]--
+
+local resolve_path
+resolve_path = function (specification)
+ local name = specification.name
+ local exists, _ = lfsisfile(name)
+ if not exists then -- resort to file: lookup
+ logreport ("log", 0, "load",
+ "path lookup of %q unsuccessful, falling back to file:",
+ name)
+ resolve_file (specification)
+ else
+ local suffix = filesuffix (name)
+ if fonts.formats[suffix] then
+ specification.forced = stringlower (suffix)
+ specification.name = fileremovesuffix(name)
+ specification.forcedname = name
+ else
+ specification.name = name
+ end
+ end
+end
+
+--[[doc--
+
+ The \verb|name:| resolver.
+
+--doc]]--
+
+--- fonts.names.resolvers.name -- Customized version of the
+--- generic name resolver.
+
+local resolve_name
+resolve_name = function (specification)
+ local resolver = fonts.names.lookup_font_name_cached
+ if config.luaotfload.run.resolver == "normal" then
+ resolver = fonts.names.lookup_font_name
+ end
+ local resolved, subfont = resolver (specification)
+ if resolved then
+ logreport ("log", 0, "load", "Lookup/name: %q -> \"%s%s\"",
+ specification.name,
+ resolved,
+ subfont and stringformat ("(%d)", subfont) or "")
+ specification.resolved = resolved
+ specification.sub = subfont
+ specification.forced = stringlower (filesuffix (resolved) or "")
+ specification.forcedname = resolved
+ specification.name = fileremovesuffix (resolved)
+ else
+ resolve_file (specification)
+ end
+end
+
+--[[doc--
+
+ We classify as \verb|anon:| those requests that have neither a
+ prefix nor brackets. According to Khaled\footnote{%
+ % XXX dead link‽
+ \url{https://github.com/phi-gamma/luaotfload/issues/4#issuecomment-17090553}.
+ }
+ they are the \XETEX equivalent of a \verb|name:| request, so we
+ will be treating them as such or, at least, in a similar fashion.
+
+ Not distinguishing between “anon” and “name” requests has a serious
+ drawback: The syntax is overloaded for requesting fonts in
+ \identifier{Type1} (\abbrev{tfm}, \abbrev{ofm}) format.
+ These are essentially \verb|file:| lookups and must be caught
+ before the \verb|name:| resolver kicks in, lest they cause the
+ database to update.
+ Even if we were to require the \verb|file:| prefix for all
+ \identifier{Type1} requests, tests have shown that certain fonts
+ still include further fonts (e.~g. \fileent{omlgcb.ofm} will ask
+ for \fileent{omsecob.tfm}) \emphasis{using the old syntax}.
+ For this reason, we introduce an extra check with an early return.
+
+--doc]]--
+
+local type1_formats = { "tfm", "ofm", "TFM", "OFM", }
+
+local resolve_anon
+resolve_anon = function (specification)
+ local name = specification.name
+ for i=1, #type1_formats do
+ local format = type1_formats[i]
+ local suffix = filesuffix (name)
+ if resolvers.findfile(name, format) then
+ local usename = suffix == format and fileremovesuffix (name) or name
+ specification.forcedname = file.addsuffix (usename, format)
+ specification.forced = format
+ return
+ end
+ end
+ --- under some weird circumstances absolute paths get
+ --- passed to the definer; we have to catch them
+ --- before the name: resolver misinterprets them.
+ name = specification.specification
+ local exists, _ = lfsisfile(name)
+ if exists then --- garbage; we do this because we are nice,
+ --- not because it is correct
+ logreport ("log", 1, "load", "file %q exists", name)
+ logreport ("log", 1, "load",
+ "... overriding borked anon: lookup with path: lookup")
+ specification.name = name
+ resolve_path (specification)
+ return
+ end
+ resolve_name (specification)
+end
+
+--[[doc--
+
+ {\bfseries EXPERIMENTAL}:
+ \identifier{kpse}-only resolver, for those who can do without
+ system fonts.
+
+--doc]]--
+
+local resolve_kpse
+resolve_kpse = function (specification)
+ local name = specification.name
+ local suffix = filesuffix (name)
+ if suffix and fonts.formats[suffix] then
+ name = fileremovesuffix (name)
+ if resolvers.findfile (name, suffix) then
+ specification.forced = stringlower (suffix)
+ specification.forcedname = name
+ return
+ end
+ end
+ for t, format in next, fonts.formats do --- brute force
+ if kpsefind_file (name, format) then
+ specification.forced = t
+ specification.name = name
+ return
+ end
+ end
+end
+
+--[[doc--
+
+ Also {\bfseries EXPERIMENTAL}: custom file resolvers via callback.
+
+--doc]]--
+
+local resolve_my = function (specification)
+ luatexbase.call_callback ("luaotfload.resolve_font", specification)
+end
+
+return {
+ init = function ( )
+ if luatexbase and luatexbase.create_callback then
+ luatexbase.create_callback ("luaotfload.resolve_font",
+ "simple", function () end)
+ end
+ logreport ("log", 5, "resolvers", "installing font resolvers")
+ local request_resolvers = fonts.definers.resolvers
+ request_resolvers.file = resolve_file
+ request_resolvers.name = resolve_name
+ request_resolvers.anon = resolve_anon
+ request_resolvers.path = resolve_path
+ request_resolvers.kpse = resolve_kpse
+ request_resolvers.my = resolve_my
+ fonts.formats.ofm = "type1"
+ fonts.encodings = fonts.encodings or { }
+ fonts.encodings.known = fonts.encodings.known or { }
+ return true
+ end, --- [.init]
+}
+
+--- vim:ft=lua:ts=8:sw=4:et
+
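For the experimental my: resolver above, a client package would register a
handler for the luaotfload.resolve_font callback and fill in the
specification table. A hedged usage sketch (the handler name and the font
file it forces are placeholders, not part of Luaotfload):

    luatexbase.add_to_callback ("luaotfload.resolve_font",
      function (specification)
        -- route every  my:<whatever>  request to one fixed OpenType file;
        -- the file name is only an example
        specification.forced     = "otf"
        specification.forcedname = "latinmodern-math.otf"
      end,
      "example.resolve_font")

A font can then be requested with the my: prefix, e.g.
\font\test={my:anything} at 10pt, and the handler decides what actually
gets loaded.
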
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-status.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-status.lua
index 1a6a9f2fb62..94dc1fd75b9 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-status.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-status.lua
@@ -1,35 +1,78 @@
return {
["hashes"]={
- { "mkstatus", "38dd7ee3f5b372550e9d5e47466ac6c4" },
- { "mkglyphlist", "62bc2c915a0fc936f9249deb0eddcaf3" },
- { "mkcharacters", "ec694a87f77067912eb73402ec99040d" },
- { "luaotfload-tool.lua", "c48f1995e9e74155aad6a745db16bcab" },
- { "luaotfload-parsers.lua", "cc86ac664ce5aedd81218e59b7180e83" },
- { "luaotfload-override.lua", "e6f67a268fa9362d5ebc5cbecad1a900" },
- { "luaotfload-fontloader.lua", "5bce3c40d6841694d6be5ad3680d5b10" },
- { "luaotfload-main.lua", "4a3ac1af4af76b6a7ba60246d90a08c0" },
- { "luaotfload-log.lua", "1f8aee13a8b8a7a20cd8b03285ab425a" },
- { "luaotfload-loaders.lua", "c364151f603514433fa3128939825d16" },
- { "luaotfload-letterspace.lua", "06cfb1e742d496b99c8980fdfd3f29f4" },
+ { "fontloader-2015-12-09.lua", "76f5af1071a3fd169f0a80704a6a5f5e" },
+ { "fontloader-util-str.lua", "cc6f38cc53aff51d60e2223bf922bb0d" },
+ { "fontloader-swiglib-test.lua", "c1cdeff07e5b8896f7aa98ce50c31618" },
+ { "fontloader-swiglib.lua", "3a350d89416637073b7f09e281dc7c11" },
+ { "fontloader-preprocessor.lua", "f87963c22d5523218f1beee7e315bae5" },
+ { "fontloader-mplib.lua", "db9d22740880daaaeb10070599847558" },
+ { "fontloader-math.lua", "f7eda9d5922e4aa7bb858ba9943fdacd" },
+ { "fontloader-l-table.lua", "c44995044ee3ccb00fccf1700881b2fa" },
+ { "fontloader-l-string.lua", "64d08350c5083058de7ec3d25ae05047" },
+ { "fontloader-l-math.lua", "51275e81c652e3fbd7d02b1c383d3049" },
+ { "fontloader-l-lua.lua", "ee479f6900b8c4d0fde7a8f536a1ce26" },
+ { "fontloader-l-lpeg.lua", "1d7a1cf9d3ee07e758f611ee41d39c41" },
+ { "fontloader-l-io.lua", "a3292f4dad2705c4eeb1d91c40bd0fde" },
+ { "fontloader-l-function.lua", "a7e68a9703c35238729da41a474e951b" },
+ { "fontloader-l-file.lua", "bb25347eee3208dbb419e80eb2809e99" },
+ { "fontloader-l-boolean.lua", "abe28515dd33e8f6c416c09bca351cf8" },
+ { "fontloader-languages.lua", "16446704262489ccbe65a963dcd490f1" },
+ { "fontloader-font-tfm.lua", "bbee5eddb11211fb0a8d993db678bf3c" },
+ { "fontloader-fonts-tfm.lua", "8fd3865240e4e87e99e0739abeda2322" },
+ { "fontloader-fonts-syn.lua", "9729d0e49b770f78e88dab86739e0297" },
+ { "fontloader-fonts-otn.lua", "f01870a8622259f58ffbdc5718a53377" },
+ { "fontloader-fonts-ota.lua", "e18236703ac20ece7ff45f9209a9b9d6" },
+ { "fontloader-fonts-lua.lua", "50b8edb1db7009b6c661ab71ff24a466" },
+ { "fontloader-fonts.lua", "19a42e8ce2c816e2d3262f3d72b0a5c7" },
+ { "fontloader-fonts-inj.lua", "2e61e59f1931a84336c6aea812e219c2" },
+ { "fontloader-fonts-ext.lua", "0eee87fb26b7d135da88ac0a43a8037a" },
+ { "fontloader-fonts-enc.lua", "b224fe179312d924ffaf8334cf5ef15b" },
+ { "fontloader-fonts-demo-vf-1.lua", "edf37ece9bd112b1b13d9475528ab210" },
+ { "fontloader-fonts-def.lua", "8dc05f2aa96d2fd0245242c8fd3320eb" },
+ { "fontloader-fonts-cbk.lua", "3e86c6a492ca8d792f6b06149ba0dd57" },
+ { "fontloader-font-otp.lua", "18b4375155925ee1809150f4f6c3973b" },
+ { "fontloader-font-oti.lua", "035a51b49029d7da48453ec5f1d65018" },
+ { "fontloader-font-otf.lua", "b1a5f4340125198b1b0489ef903dd267" },
+ { "fontloader-font-otb.lua", "93461f2f412a9b33b35a273c09b64291" },
+ { "fontloader-font-map.lua", "60c9f3d4d80944ce9123c9cddc1e57e5" },
+ { "fontloader-font-ini.lua", "179f0a75cda26696c1b1cd6d7fe0d8ae" },
+ { "fontloader-font-def.lua", "3c71c27300a8cb5c29f5d278d2049fb6" },
+ { "fontloader-font-con.lua", "5ae91fc3b5bb8d22b2fba27258d49971" },
+ { "fontloader-font-cid.lua", "52421d1fdaa07ec4b1d936c6ff5079be" },
+ { "fontloader-font-afm.lua", "ece4863414d6b38c2e577110c9b55bd3" },
+ { "fontloader-font-afk.lua", "b36a76ceb835f41f8c05b471000ddc14" },
+ { "fontloader-data-con.lua", "675f5a0af45ffb3e0d2e2ab5d6c2e47b" },
+ { "fontloader-basics-nod.lua", "d060e22b2afed6e7624f879b0e1a7d71" },
+ { "fontloader-tl2014.lua", "5bce3c40d6841694d6be5ad3680d5b10" },
+ { "fontloader-reference.lua", "f3481efe06dc45db0dc7404aa96754e3" },
+ { "fontloader-basics-gen.lua", "aa7229a3a0ae35d19bbe46e71d65bd2c" },
+ { "luaotfload-package.lua", "dcc2311200ba2b0efa2ac76b152b9720" },
+ { "mktests", "918cb50be9ee8bd645ac1a27dc501e8c" },
+ { "mkstatus", "fcc6c1817169a64cdf8b0ef48bec36a2" },
+ { "mkimport", "8c94dde3cfe04864e1785e6fc8bebc96" },
+ { "mkglyphlist", "0acaf0fbc9dce6fda4e717a7ddcfdef6" },
+ { "mkcharacters", "5274a32b25f2dc1f55e75546c551035e" },
{ "luaotfload-glyphlist.lua", "ff440162d1b8a78a586375ee65630c21" },
- { "luaotfload-fonts-tfm.lua", "8fd3865240e4e87e99e0739abeda2322" },
- { "luaotfload-fonts-lua.lua", "50b8edb1db7009b6c661ab71ff24a466" },
- { "luaotfload-fonts-ext.lua", "0eee87fb26b7d135da88ac0a43a8037a" },
- { "luaotfload-fonts-enc.lua", "32775266bfc9eb6bbd9a16e3a7de398d" },
- { "luaotfload-fonts-def.lua", "8dc05f2aa96d2fd0245242c8fd3320eb" },
- { "luaotfload-fonts-cbk.lua", "c1b7a4e5508d68f45a99cdc38c2d0885" },
- { "luaotfload-features.lua", "9a0414d3bcd26497f8082c7803b38676" },
- { "luaotfload-diagnostics.lua", "273afde55dff486fb222a50981b3b1d7" },
- { "luaotfload-database.lua", "3392acc5a193a4988c156d794ed6933f" },
- { "luaotfload-colors.lua", "99878378ef0954d0b67f13db8456dea7" },
- { "luaotfload-characters.lua", "f89547bf3a33bea16a94c04ca8ef7f35" },
- { "luaotfload-basics-nod.lua", "7fdaff58d8c32e3115286504cbd7bcac" },
- { "luaotfload-basics-gen.lua", "9d5c3fc031af110ba67f3faa1a569779" },
- { "luaotfload-auxiliary.lua", "b3c893f68217b4147e5d3ba694648583" },
+ { "luaotfload-characters.lua", "813eeb15ddf352b9385552677cb7773f" },
+ { "luaotfload-tool.lua", "75e1f5bf8acf87a63f732083851b3c75" },
+ { "luaotfload-resolvers.lua", "11846a9f9f4f292911f053d07ab09771" },
+ { "luaotfload-parsers.lua", "bcbf9be9a8ef5dc77025dff46a79007c" },
+ { "luaotfload-main.lua", "98456048a0ebb8040c6b46de529398ec" },
+ { "luaotfload-log.lua", "767adc12efe986322d16dd85f05b3e75" },
+ { "luaotfload-loaders.lua", "40e9e162b84e22db29cd79afb8a34761" },
+ { "luaotfload-letterspace.lua", "3f87639c395f3dd83b6c3ae4f717a02a" },
+ { "luaotfload-init.lua", "d850a9a2d87c122de38b9221bf39473c" },
+ { "luaotfload-features.lua", "3e98785b46c6779f70ed6e7e4d497e53" },
+ { "luaotfload-diagnostics.lua", "7b35c9f91e3e73fc5a61dbfe1f0e7ad9" },
+ { "luaotfload-database.lua", "ffb5c6497be2c518eb28fcefaa3175c1" },
+ { "luaotfload-configuration.lua", "04eb776c853a22fc49dcf0e52da85dda" },
+ { "luaotfload-colors.lua", "b68a52ce0b046343cb689d981d0c5a7a" },
+ { "luaotfload-auxiliary.lua", "04d8d1f39e06255a41a9f00febc66e2b" },
},
["notes"]={
- ["committer"]="Philipp Gesang <phg42.2a@gmail.com>",
- ["revision"]="fa525b6b60f23a95f550aa737d07b9919720e7fc",
- ["timestamp"]="2014-08-10 23:15:22 +0200",
+ ["committer"]="Philipp Gesang <phg@phi-gamma.net>",
+ ["loader"]="fontloader-2015-12-09.lua",
+ ["revision"]="656e7a115ab7c87743be59808713908f1d22fa6c",
+ ["timestamp"]="2015-12-09 23:02:31 +0100",
},
} \ No newline at end of file
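Each record in the hashes table above pairs a file name with its MD5 sum.
These can be checked from texlua with LuaTeX's bundled md5 library
(md5.sumhexa); the kpse-based lookup below is an assumption about where the
files live, and the snippet is a verification sketch rather than the actual
mkstatus logic:

    kpse.set_program_name "luatex"
    local status = dofile (kpse.find_file ("luaotfload-status.lua", "lua"))

    for _, entry in ipairs (status.hashes) do
      local name, expected = entry[1], entry[2]
      local path = kpse.find_file (name, "lua")
                or kpse.find_file (name, "texmfscripts")
      local fh   = path and io.open (path, "rb")
      if fh then
        local sum = md5.sumhexa (fh:read "*all")
        fh:close ()
        print (name, sum == expected and "ok" or "MISMATCH")
      else
        print (name, "not found")
      end
    end
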
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty
index 79a9360eb5d..836d8198bd7 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty
@@ -1,8 +1,10 @@
-%% Copyright (C) 2009-2014
+%% Copyright (C) 2009-2015
%%
%% by Elie Roux <elie.roux@telecom-bretagne.eu>
%% and Khaled Hosny <khaledhosny@eglug.org>
%% and Philipp Gesang <philipp.gesang@alumni.uni-heidelberg.de>
+%% and Dohyun Kim <nomosnomos@gmail.com>
+%% and David Carlisle <d.p.carlisle@gmail.com>
%%
%% This file is part of Luaotfload.
%%
@@ -32,16 +34,17 @@
%%
\csname ifluaotfloadloaded\endcsname
\let\ifluaotfloadloaded\endinput
-\bgroup\expandafter\expandafter\expandafter\egroup
-\expandafter\ifx\csname ProvidesPackage\endcsname\relax
- \input luatexbase.sty
-\else
- \NeedsTeXFormat{LaTeX2e}
+\ifx\newluafunction\undefined
+ \input ltluatex
+\fi
+\ifdefined\ProvidesPackage
\ProvidesPackage{luaotfload}%
%% FIXME The date is meaningless, we need to find a way to
%% use the git revision instead.
- [2014/08/10 v2.5-4 OpenType layout system]
- \RequirePackage{luatexbase}
+ [2015/12/09 v2.6 OpenType layout system]
\fi
-\RequireLuaModule{luaotfload-main}
+\directlua{
+require('luaotfload-main')
+local _void = luaotfload.main ()
+}