author    Mojca Miklavec <mojca.miklavec@gmail.com>  2014-05-05 20:29:55 +0000
committer Mojca Miklavec <mojca.miklavec@gmail.com>  2014-05-05 20:29:55 +0000
commit    ba9a57343987f1c2c72396e7c38f1fa30352c24c (patch)
tree      66a8b12cdf67427ce96770fd0e9e581759aade1c /Master/texmf-dist/tex/generic/context/luatex
parent    15242121b8ddf7d4a041fb3998d295dd8232e1eb (diff)
ConTeXt 2014.04.28 23:24
git-svn-id: svn://tug.org/texlive/trunk@33856 c570f23f-e606-0410-a88d-b1316a301751
Diffstat (limited to 'Master/texmf-dist/tex/generic/context/luatex')
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua    |   40
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua    |   94
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex        |    2
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-inj.lua     |  523
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua  | 3042
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-otn.lua     | 2848
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua     |    4
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua         |   12
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex         |    4
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex          |    4
10 files changed, 5934 insertions, 639 deletions
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
index 4a46fbb0782..a304ab6aaa0 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
@@ -89,6 +89,7 @@ local remapper = {
fea = "font feature files",
pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ afm = "afm",
}
function resolvers.findfile(name,fileformat)
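The remapper above maps file suffixes to kpse file-format names, so the new afm entry lets the generic resolver locate AFM metrics as well. A minimal lookup sketch (the file name is hypothetical; the body of findfile is not shown here, but it ends in a kpse.find_file call using the remapped format name):

    -- hedged sketch of a lookup that now goes through the "afm" format
    local metrics = resolvers.findfile("lmr10.afm", "afm")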
@@ -117,6 +118,11 @@ end
resolvers.findbinfile = resolvers.findfile
+function resolvers.loadbinfile(filename,filetype)
+ local data = io.loaddata(filename)
+ return true, data, #data
+end
+
function resolvers.resolve(s)
return s
end
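The added resolvers.loadbinfile mirrors the ConTeXt resolver interface: a success flag, the raw data, and its size. A minimal sketch (the path is hypothetical and assumed to be already resolved, e.g. via findbinfile):

    local ok, data, size = resolvers.loadbinfile("/some/path/somefont.otf", "otf")
    if ok then
      -- data holds the raw bytes, size == #data
    end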
@@ -149,19 +155,29 @@ do
local cachepaths = kpse.expand_var('$TEXMFCACHE') or ""
- -- quite like tex live or so
+ -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex)
- if cachepaths == "" then
+ if cachepaths == "" or cachepaths == "$TEXMFCACHE" then
cachepaths = kpse.expand_var('$TEXMFVAR') or ""
end
- -- this also happened to be used
+ -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex)
- if cachepaths == "" then
+ if cachepaths == "" or cachepaths == "$TEXMFVAR" then
cachepaths = kpse.expand_var('$VARTEXMF') or ""
end
- -- and this is a last resort
+ -- and this is a last resort (hm, we could use TEMP or TEMPDIR)
+
+ if cachepaths == "" then
+ local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+ for i=1,#fallbacks do
+ cachepaths = os.getenv(fallbacks[i]) or ""
+ if cachepath ~= "" and lfs.isdir(cachepath) then
+ break
+ end
+ end
+ end
if cachepaths == "" then
cachepaths = "."
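The cache location is thus resolved in a fixed order; a standalone sketch of that chain (the helper name is made up for illustration, lfs.isdir as provided by the generic loader):

    local function resolvecachepath() -- hypothetical helper mirroring the order above
      local p = kpse.expand_var("$TEXMFCACHE") or ""
      if p == "" or p == "$TEXMFCACHE" then p = kpse.expand_var("$TEXMFVAR") or "" end
      if p == "" or p == "$TEXMFVAR"   then p = kpse.expand_var("$VARTEXMF")  or "" end
      if p == "" then
        for _, name in ipairs { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } do
          local d = os.getenv(name) or ""
          if d ~= "" and lfs.isdir(d) then
            p = d
            break
          end
        end
      end
      return p ~= "" and p or "." -- last resort: the current directory
    end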
@@ -238,6 +254,18 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data = false
local luaname, lucname = makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then
+ -- in case we used luatex and luajittex mixed ... lub or luc file
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then -- maybe also check for size
texio.write(string.format("(load luc: %s)",lucname))
data = loadfile(lucname)
@@ -267,7 +295,7 @@ function caches.savedata(path,name,data)
local luaname, lucname = makefullname(path,name)
if luaname then
texio.write(string.format("(save: %s)",luaname))
- table.tofile(luaname,data,true,{ reduce = true })
+ table.tofile(luaname,data,true)
if lucname and type(caches.compile) == "function" then
os.remove(lucname) -- better be safe
texio.write(string.format("(save: %s)",lucname))
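Together the two cache functions form a lua/luc pair: savedata writes the plain table and, when caches.compile is available, a bytecode companion; loaddata prefers the bytecode file and, with the change above, regenerates it from the .lua file when only that one is present (for instance after switching between luatex and luajittex). A usage sketch with hypothetical names:

    caches.savedata(cachepath, "myfont-metrics", tfmdata)           -- writes the .lua (and .luc) file
    local reloaded = caches.loaddata({ cachepath }, "myfont-metrics")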
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
index 5ab9df7f94b..373dab5a8c7 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
@@ -54,22 +54,33 @@ nodes.handlers = { }
local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
+local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" }
nodes.nodecodes = nodecodes
nodes.whatcodes = whatcodes
nodes.whatsitcodes = whatcodes
nodes.glyphcodes = glyphcodes
+nodes.disccodes = disccodes
local free_node = node.free
local remove_node = node.remove
local new_node = node.new
local traverse_id = node.traverse_id
-local math_code = nodecodes.math
-
nodes.handlers.protectglyphs = node.protect_glyphs
nodes.handlers.unprotectglyphs = node.unprotect_glyphs
+local math_code = nodecodes.math
+local end_of_math = node.end_of_math
+
+function node.end_of_math(n)
+ if n.id == math_code and n.subtype == 1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+
function nodes.remove(head, current, free_too)
local t = current
head, current = remove_node(head,current)
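The wrapper above makes node.end_of_math usable both when handed the math-on node and when already sitting on the math-off node (subtype 1). A hedged usage sketch, assuming head is a node list:

    for m in node.traverse_id(nodes.nodecodes.math, head) do
      local stop = node.end_of_math(m) -- the matching math-off node, or m itself
      -- the material between m and stop is the inline formula
    end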
@@ -88,17 +99,80 @@ function nodes.delete(head,current)
return nodes.remove(head,current,true)
end
-nodes.before = node.insert_before
-nodes.after = node.insert_after
-
function nodes.pool.kern(k)
local n = new_node("kern",1)
n.kern = k
return n
end
-function nodes.endofmath(n)
- for n in traverse_id(math_code,n.next) do
- return n
- end
-end
+-- experimental
+
+local getfield = node.getfield or function(n,tag) return n[tag] end
+local setfield = node.setfield or function(n,tag,value) n[tag] = value end
+
+nodes.getfield = getfield
+nodes.setfield = setfield
+
+nodes.getattr = getfield
+nodes.setattr = setfield
+
+if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end
+if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
+if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
+if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
+if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
+if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
+if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
+
+function nodes.tonut (n) return n end
+function nodes.tonode(n) return n end
+
+-- being lazy ... just copy a bunch ... not all needed in generic but we assume
+-- nodes to be kind of private anyway
+
+nodes.tostring = node.tostring or tostring
+nodes.copy = node.copy
+nodes.copy_list = node.copy_list
+nodes.delete = node.delete
+nodes.dimensions = node.dimensions
+nodes.end_of_math = node.end_of_math
+nodes.flush_list = node.flush_list
+nodes.flush_node = node.flush_node
+nodes.free = node.free
+nodes.insert_after = node.insert_after
+nodes.insert_before = node.insert_before
+nodes.hpack = node.hpack
+nodes.new = node.new
+nodes.tail = node.tail
+nodes.traverse = node.traverse
+nodes.traverse_id = node.traverse_id
+nodes.slide = node.slide
+nodes.vpack = node.vpack
+
+nodes.first_glyph = node.first_glyph
+nodes.first_character = node.first_character
+nodes.has_glyph = node.has_glyph or node.first_glyph
+
+nodes.current_attr = node.current_attr
+nodes.do_ligature_n = node.do_ligature_n
+nodes.has_field = node.has_field
+nodes.last_node = node.last_node
+nodes.usedlist = node.usedlist
+nodes.protrusion_skippable = node.protrusion_skippable
+nodes.write = node.write
+
+nodes.has_attribute = node.has_attribute
+nodes.set_attribute = node.set_attribute
+nodes.unset_attribute = node.unset_attribute
+
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyphs = node.unprotect_glyphs
+nodes.kerning = node.kerning
+nodes.ligaturing = node.ligaturing
+nodes.mlist_to_hlist = node.mlist_to_hlist
+
+-- in generic code, at least for some time, we stay nodes, while in context
+-- we can go nuts (e.g. experimental); this split permits us to keep code
+-- used elsewhere stable but at the same time play around in context
+
+nodes.nuts = nodes
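With this accessor layer in place, generic code can be written against the indirect getters and still run unchanged where the direct node.getfield family exists. A small sketch, assuming head is a node list:

    for g in nodes.traverse_id(nodes.nodecodes.glyph, head) do
      local chr = nodes.getchar(g) -- falls back to g.char when node.getchar is absent
      local fnt = nodes.getfont(g) -- falls back to g.font
    end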
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex
index bb34587ff6c..abe49897016 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex
@@ -12,6 +12,8 @@
%D rather dumb attribute allocator. We start at 256 because we don't want
%D any interference with the attributes used in the font handler.
+\ifx\newattribute\undefined \else \endinput \fi
+
\newcount \lastallocatedattribute \lastallocatedattribute=255
\def\newattribute#1%
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-inj.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-inj.lua
new file mode 100644
index 00000000000..5e6c0707092
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-inj.lua
@@ -0,0 +1,523 @@
+if not modules then modules = { } end modules ['node-inj'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is very experimental (this will change when we have luatex > .50 and
+-- a few pending thingies are available). Also, Idris needs to make a few more
+-- test fonts. Btw, future versions of luatex will have extended glyph properties
+-- that can be of help. Some optimizations can go away when we have faster machines.
+
+local next = next
+local utfchar = utf.char
+
+local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("nodes","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local nodepool = nodes.pool
+local newkern = nodepool.kern
+
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local a_kernpair = attributes.private('kernpair')
+local a_ligacomp = attributes.private('ligacomp')
+local a_markbase = attributes.private('markbase')
+local a_markmark = attributes.private('markmark')
+local a_markdone = attributes.private('markdone')
+local a_cursbase = attributes.private('cursbase')
+local a_curscurs = attributes.private('curscurs')
+local a_cursdone = attributes.private('cursdone')
+
+-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
+-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
+-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
+-- that this code is not 100% okay but examples are needed to figure things out.
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local cursives = { }
+local marks = { }
+local kerns = { }
+
+-- Currently we do gpos/kern in a somewhat unofficial way but when we have the extra fields in
+-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
+-- can share tables.
+
+-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
+-- checking with husayni (volt and fontforge).
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
+ local ws, wn = tfmstart.width, tfmnext.width
+ local bound = #cursives + 1
+ start[a_cursbase] = bound
+ nxt[a_curscurs] = bound
+ cursives[bound] = { rlmode, dx, dy, ws, wn }
+ return dx, dy, bound
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
+ local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+ -- dy = y - h
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
+ local bound = current[a_kernpair]
+ if bound then
+ local kb = kerns[bound]
+ -- inefficient but singles have less, but weird anyway, needs checking
+ kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
+ else
+ bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
+ end
+ return x, y, w, h, bound
+ end
+ return x, y, w, h -- no bound
+end
+
+function injections.setkern(current,factor,rlmode,x,tfmchr)
+ local dx = factor*x
+ if dx ~= 0 then
+ local bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, dx }
+ return dx, bound
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ local bound = base[a_markbase]
+ local index = 1
+ if bound then
+ local mb = marks[bound]
+ if mb then
+ -- if not index then index = #mb + 1 end
+ index = #mb + 1
+ mb[index] = { dx, dy, rlmode }
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ return dx, dy, bound
+ else
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ end
+ end
+ index = index or 1
+ bound = #marks + 1
+ base[a_markbase] = bound
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ marks[bound] = { [index] = { dx, dy, rlmode } }
+ return dx, dy, bound
+end
+
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local kp = n[a_kernpair]
+ local mb = n[a_markbase]
+ local mm = n[a_markmark]
+ local md = n[a_markdone]
+ local cb = n[a_cursbase]
+ local cc = n[a_curscurs]
+ local char = n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k = kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m = marks[mm]
+ if mb then
+ local m = m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
+ else
+ m = m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
+ end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c = cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
+ end
+ end
+ report_injections("end run")
+end
+
+-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
+-- todo: check for attribute
+
+-- We can have a fast test on a font being processed, so we can check faster for marks etc
+-- but I'll make a context variant anyway.
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",current.kern)
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = current.next
+ end
+end
+
+function injections.handler(head,where,keep)
+ local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ -- in the future variant we will not copy items but refs to tables
+ local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
+ if has_kerns then -- move outside loop
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
+ local dy = y - h
+ if dy ~= 0 then
+ ky[n] = dy
+ end
+ if w ~= 0 or x ~= 0 then
+ wx[n] = kk
+ end
+ rl[n] = kk[1] -- could move in test
+ end
+ end
+ end
+ end
+ else
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid > 0 then
+ -- we can assume done == true because we have cursives and marks
+ local cx = { }
+ if has_kerns and next(ky) then
+ for n, k in next, ky do
+ n.yoffset = k
+ end
+ end
+ -- todo: reuse t and use maxt
+ if has_cursives then
+ local p_cursbase, p = nil, nil
+ -- since we need valid[n+1] we can also use a "while true do"
+ local t, d, maxt = { }, { }, 0
+ for i=1,nofvalid do -- valid == glyphs
+ local n = valid[i]
+ if not mk[n] then
+ local n_cursbase = n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs = n[a_curscurs]
+ if p_cursbase == n_curscurs then
+ local c = cursives[n_curscurs]
+ if c then
+ local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
+ if rlmode >= 0 then
+ dx = dx - ws
+ else
+ dx = dx + wn
+ end
+ if dx ~= 0 then
+ cx[n] = dx
+ rl[n] = rlmode
+ end
+ -- if rlmode and rlmode < 0 then
+ dy = -dy
+ -- end
+ maxt = maxt + 1
+ t[maxt] = p
+ d[maxt] = dy
+ else
+ maxt = 0
+ end
+ end
+ elseif maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ti.yoffset + ny
+ end
+ maxt = 0
+ end
+ if not n_cursbase and maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ p_cursbase, p = n_cursbase, n
+ end
+ end
+ if maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ if not keep then
+ cursives = { }
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p = valid[i]
+ local p_markbase = p[a_markbase]
+ if p_markbase then
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark = n[a_markmark]
+ if p_markbase == n_markmark then
+ local index = n[a_markdone] or 1
+ local d = mrks[index]
+ if d then
+ local rlmode = d[3]
+ --
+ local k = wx[p]
+ if k then
+ local x = k[2]
+ local w = k[4]
+ if w then
+ if rlmode and rlmode >= 0 then
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ n.xoffset = p.xoffset - p.width + d[1] - (w-x)
+ else
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ -- okay for husayni
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ -- needs checking: is x ok here?
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ n.xoffset = p.xoffset - d[1]
+ end
+ local w = n.width
+ if w ~= 0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
+ end
+ end
+ -- --
+ if mk[p] then
+ n.yoffset = p.yoffset + d[2]
+ else
+ n.yoffset = n.yoffset + p.yoffset + d[2]
+ end
+ --
+ if nofmarks == 1 then
+ break
+ else
+ nofmarks = nofmarks - 1
+ end
+ end
+ else
+ -- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
+ end
+ end
+ end
+ end
+ if not keep then
+ marks = { }
+ end
+ end
+ -- todo : combine
+ if next(wx) then
+ for n, k in next, wx do
+ -- only w can be nil (kernclasses), can be sped up when w == nil
+ local x = k[2]
+ local w = k[4]
+ if w then
+ local rl = k[1] -- r2l = k[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx)) -- type 0/2
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x)) -- type 0/2
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x)) -- type 0/2
+ end
+ if wx ~= 0 then
+ insert_node_after (head,n,newkern(wx)) -- type 0/2
+ end
+ end
+ elseif x ~= 0 then
+ -- this needs checking for rl < 0 but it is unlikely that a r2l script
+ -- uses kernclasses between glyphs so we're probably safe (KE has a
+ -- problematic font where marks interfere with rl < 0 in the previous
+ -- case)
+ insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
+ end
+ end
+ end
+ if next(cx) then
+ for n, k in next, cx do
+ if k ~= 0 then
+ local rln = rl[n]
+ if rln and rln < 0 then
+ insert_node_before(head,n,newkern(-k)) -- type 0/2
+ else
+ insert_node_before(head,n,newkern(k)) -- type 0/2
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ elseif not keep then
+ kerns, cursives, marks = { }, { }, { }
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
+ if y and y ~= 0 then
+ n.yoffset = y -- todo: h ?
+ end
+ if w then
+ -- copied from above
+ -- local r2l = kk[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ -- simple (e.g. kernclass kerns)
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ else
+ -- no tracing needed
+ end
+ return head, false
+end
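For orientation, a hedged sketch of how this module is driven (the real caller is the feature processor in luatex-fonts-otn.lua; the variable names and the "where" string below are illustrative): positioning features first record offsets per glyph, then one handler pass turns them into kern nodes and x/y offsets.

    -- during gpos processing: record a single kern for a glyph node
    local dx, bound = nodes.injections.setkern(glyphnode, factor, rlmode, kernvalue, tfmchar)
    -- after a head has been processed: materialize the recorded kerns/marks/cursives
    -- (the second argument is accepted but not inspected in this version; keep=false
    -- clears the bookkeeping tables afterwards)
    head, done = nodes.injections.handler(head, "sequence", false)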
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
index cf5862ca9f4..dd98686267a 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 05/28/13 00:34:00
+-- merge date : 04/28/14 23:24:10
do -- begin closure to overcome local limits and interference
@@ -82,6 +82,9 @@ function optionalrequire(...)
return result
end
end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
end -- closure
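The lua.mask probe simply checks whether the Lua lexer accepts a UTF-8 identifier: load([[τεχ = 1]]) yields a chunk under luatex's UTF-aware lexer and nil where such identifiers are rejected, so the field ends up as "utf" or "ascii".

    -- illustrative check
    print(lua and lua.mask) -- "utf" under luatex, "ascii" otherwise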
@@ -95,12 +98,15 @@ if not modules then modules={} end modules ['l-lpeg']={
license="see context related readme files"
}
lpeg=require("lpeg")
+if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
local type,next,tostring=type,next,tostring
local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -110,28 +116,46 @@ patterns.anything=anything
patterns.endofstring=endofstring
patterns.beginofstring=alwaysmatched
patterns.alwaysmatched=alwaysmatched
-local digit,sign=R('09'),S('+-')
+local sign=S('+-')
+local zero=P('0')
+local digit=R('09')
+local octdigit=R("07")
+local lowercase=R("az")
+local uppercase=R("AZ")
+local underscore=P("_")
+local hexdigit=digit+lowercase+uppercase
local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
local newline=crlf+S("\r\n")
local escaped=P("\\")*anything
local squote=P("'")
local dquote=P('"')
local space=P(" ")
-local utfbom_32_be=P('\000\000\254\255')
-local utfbom_32_le=P('\255\254\000\000')
-local utfbom_16_be=P('\255\254')
-local utfbom_16_le=P('\254\255')
-local utfbom_8=P('\239\187\191')
+local period=P(".")
+local comma=P(",")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\254\255')
+local utfbom_16_le=P('\255\254')
+local utfbom_8=P('\239\187\191')
local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")
local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
local utf8next=R("\128\191")
+patterns.utfbom_32_be=utfbom_32_be
+patterns.utfbom_32_le=utfbom_32_le
+patterns.utfbom_16_be=utfbom_16_be
+patterns.utfbom_16_le=utfbom_16_le
+patterns.utfbom_8=utfbom_8
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
patterns.utf8one=R("\000\127")
patterns.utf8two=R("\194\223")*utf8next
patterns.utf8three=R("\224\239")*utf8next*utf8next
patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
patterns.utfbom=utfbom
patterns.utftype=utftype
+patterns.utfstricttype=utfstricttype
patterns.utfoffset=utfoffset
local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
@@ -151,27 +175,14 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
patterns.stripper=stripper
+patterns.fullstripper=fullstripper
patterns.collapser=collapser
-patterns.digit=digit
-patterns.sign=sign
-patterns.cardinal=sign^0*digit^1
-patterns.integer=sign^0*digit^1
-patterns.unsigned=digit^0*P('.')*digit^1
-patterns.float=sign^0*patterns.unsigned
-patterns.cunsigned=digit^0*P(',')*digit^1
-patterns.cfloat=sign^0*patterns.cunsigned
-patterns.number=patterns.float+patterns.integer
-patterns.cnumber=patterns.cfloat+patterns.integer
-patterns.oct=P("0")*R("07")^1
-patterns.octal=patterns.oct
-patterns.HEX=P("0x")*R("09","AF")^1
-patterns.hex=P("0x")*R("09","af")^1
-patterns.hexadecimal=P("0x")*R("09","AF","af")^1
-patterns.lowercase=R("az")
-patterns.uppercase=R("AZ")
+patterns.lowercase=lowercase
+patterns.uppercase=uppercase
patterns.letter=patterns.lowercase+patterns.uppercase
patterns.space=space
patterns.tab=P("\t")
@@ -179,12 +190,12 @@ patterns.spaceortab=patterns.space+patterns.tab
patterns.newline=newline
patterns.emptyline=newline^1
patterns.equal=P("=")
-patterns.comma=P(",")
-patterns.commaspacer=P(",")*spacer^0
-patterns.period=P(".")
+patterns.comma=comma
+patterns.commaspacer=comma*spacer^0
+patterns.period=period
patterns.colon=P(":")
patterns.semicolon=P(";")
-patterns.underscore=P("_")
+patterns.underscore=underscore
patterns.escaped=escaped
patterns.squote=squote
patterns.dquote=dquote
@@ -197,10 +208,29 @@ patterns.unspacer=((patterns.spacer^1)/"")^0
patterns.singlequoted=squote*patterns.nosquote*squote
patterns.doublequoted=dquote*patterns.nodquote*dquote
patterns.quoted=patterns.doublequoted+patterns.singlequoted
-patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.digit=digit
+patterns.octdigit=octdigit
+patterns.hexdigit=hexdigit
+patterns.sign=sign
+patterns.cardinal=digit^1
+patterns.integer=sign^-1*digit^1
+patterns.unsigned=digit^0*period*digit^1
+patterns.float=sign^-1*patterns.unsigned
+patterns.cunsigned=digit^0*comma*digit^1
+patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=zero*octdigit^1
+patterns.octal=patterns.oct
+patterns.HEX=zero*P("X")*(digit+uppercase)^1
+patterns.hex=zero*P("x")*(digit+lowercase)^1
+patterns.hexadecimal=zero*S("xX")*hexdigit^1
+patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1
+patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1
+patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring
patterns.somecontent=(anything-newline-space)^1
patterns.beginline=#(1-newline)
-patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0))
local function anywhere(pattern)
return P { P(pattern)+1*V(1) }
end
@@ -372,7 +402,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction)
+function lpeg.finder(lst,makefunction,isutf)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -388,7 +418,11 @@ function lpeg.finder(lst,makefunction)
else
pattern=P(lst)
end
- pattern=(1-pattern)^0*pattern
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -401,8 +435,8 @@ local splitters_f,splitters_s={},{}
function lpeg.firstofsplit(separator)
local splitter=splitters_f[separator]
if not splitter then
- separator=P(separator)
- splitter=C((1-separator)^0)
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)
splitters_f[separator]=splitter
end
return splitter
@@ -410,12 +444,31 @@ end
function lpeg.secondofsplit(separator)
local splitter=splitters_s[separator]
if not splitter then
- separator=P(separator)
- splitter=(1-separator)^0*separator*C(anything^0)
+ local pattern=P(separator)
+ splitter=(1-pattern)^0*pattern*C(anything^0)
splitters_s[separator]=splitter
end
return splitter
end
+local splitters_s,splitters_p={},{}
+function lpeg.beforesuffix(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)*pattern*endofstring
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.afterprefix(separator)
+ local splitter=splitters_p[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=pattern*C(anything^0)
+ splitters_p[separator]=splitter
+ end
+ return splitter
+end
function lpeg.balancer(left,right)
left,right=P(left),P(right)
return P { left*((1-left-right)+V(1))^0*right }
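The two new splitters complement firstofsplit/secondofsplit: beforesuffix captures what precedes a trailing suffix, afterprefix what follows a leading prefix. Illustrative matches:

    local base = lpeg.match(lpeg.beforesuffix(".lua"), "luatex-fonts-merged.lua") -- "luatex-fonts-merged"
    local rest = lpeg.match(lpeg.afterprefix("luatex-"), "luatex-basics-nod.lua") -- "basics-nod.lua"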
@@ -647,9 +700,6 @@ end
function lpeg.times(pattern,n)
return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
end
-local digit=R("09")
-local period=P(".")
-local zero=P("0")
local trailingzeros=zero^0*-digit
local case_1=period*trailingzeros/""
local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
@@ -709,11 +759,15 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
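string.strip trims blanks at the ends, while the new string.fullstrip also eats newlines and tabs; inner whitespace is preserved by both. For example:

    local a = string.strip("  keep  inner  ")           -- "keep  inner"
    local b = string.fullstrip("\n\t keep  inner \n")   -- "keep  inner"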
@@ -847,6 +901,36 @@ local function sortedkeys(tab)
return {}
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return {}
+ end
+end
local function sortedhashkeys(tab,cmp)
if tab then
local srt,s={},0
@@ -872,6 +956,8 @@ function table.allkeys(t)
return sortedkeys(keys)
end
table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
table.sortedhashkeys=sortedhashkeys
local function nothing() end
local function sortedhash(t,cmp)
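The new helpers split a table's keys by type before sorting, which is handy for mixed array/hash tables. Illustrative:

    local t = { "first", "second", mode = "utf", level = 3 }
    local h = table.sortedhashonly(t)   -- { "level", "mode" }
    local i = table.sortedindexonly(t)  -- { 1, 2 }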
@@ -883,10 +969,13 @@ local function sortedhash(t,cmp)
s=sortedkeys(t)
end
local n=0
+ local m=#s
local function kv(s)
- n=n+1
- local k=s[n]
- return k,t[k]
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
end
return kv,s
else
@@ -1037,6 +1126,7 @@ local noquotes,hexify,handle,reduce,compact,inline,functions
local reserved=table.tohash {
'and','break','do','else','elseif','end','false','for','function','if',
'in','local','nil','not','or','repeat','return','then','true','until','while',
+ 'NaN','goto',
}
local function simple_table(t)
if #t>0 then
@@ -1056,12 +1146,12 @@ local function simple_table(t)
else
tt[nt]=tostring(v)
end
- elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=tostring(v)
elseif tv=="string" then
nt=nt+1
tt[nt]=format("%q",v)
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=v and "true" or "false"
else
tt=nil
break
@@ -1094,7 +1184,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s[%q]={",depth,name))
end
elseif tn=="boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
+ handle(format("%s[%s]={",depth,name and "true" or "false"))
else
handle(format("%s{",depth))
end
@@ -1118,21 +1208,21 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k=sk[i]
local v=root[k]
- local t,tk=type(v),type(k)
+ local tv,tk=type(v),type(k)
if compact and first and tk=="number" and k>=first and k<=last then
- if t=="number" then
+ if tv=="number" then
if hexify then
handle(format("%s 0x%04X,",depth,v))
else
handle(format("%s %s,",depth,v))
end
- elseif t=="string" then
+ elseif tv=="string" then
if reduce and tonumber(v) then
handle(format("%s %s,",depth,v))
else
handle(format("%s %q,",depth,v))
end
- elseif t=="table" then
+ elseif tv=="table" then
if not next(v) then
handle(format("%s {},",depth))
elseif inline then
@@ -1145,11 +1235,11 @@ local function do_serialize(root,name,depth,level,indexed)
else
do_serialize(v,k,depth,level+1,true)
end
- elseif t=="boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t=="function" then
+ elseif tv=="boolean" then
+ handle(format("%s %s,",depth,v and "true" or "false"))
+ elseif tv=="function" then
if functions then
- handle(format('%s load(%q),',depth,dump(v)))
+ handle(format('%s load(%q),',depth,dump(v)))
else
handle(format('%s "function",',depth))
end
@@ -1160,7 +1250,7 @@ local function do_serialize(root,name,depth,level,indexed)
if false then
handle(format("%s __p__=nil,",depth))
end
- elseif t=="number" then
+ elseif tv=="number" then
if tk=="number" then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
@@ -1169,9 +1259,9 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif tk=="boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
else
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
@@ -1186,7 +1276,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%s,",depth,k,v))
end
end
- elseif t=="string" then
+ elseif tv=="string" then
if reduce and tonumber(v) then
if tk=="number" then
if hexify then
@@ -1195,7 +1285,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%s,",depth,k,v))
else
@@ -1209,14 +1299,14 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%q,",depth,k,v))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,v))
else
handle(format("%s [%q]=%q,",depth,k,v))
end
end
- elseif t=="table" then
+ elseif tv=="table" then
if not next(v) then
if tk=="number" then
if hexify then
@@ -1225,7 +1315,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]={},",depth,k))
end
elseif tk=="boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
+ handle(format("%s [%s]={},",depth,k and "true" or "false"))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={},",depth,k))
else
@@ -1241,7 +1331,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
elseif tk=="boolean" then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
else
@@ -1253,21 +1343,21 @@ local function do_serialize(root,name,depth,level,indexed)
else
do_serialize(v,k,depth,level+1)
end
- elseif t=="boolean" then
+ elseif tv=="boolean" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
+ handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
end
- elseif t=="function" then
+ elseif tv=="function" then
if functions then
local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
if tk=="number" then
@@ -1277,7 +1367,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=load(%q),",depth,k,f))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=load(%q),",depth,k,f))
else
@@ -1292,7 +1382,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,tostring(v)))
else
@@ -1593,7 +1683,9 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
@@ -1628,6 +1720,24 @@ function table.sorted(t,...)
sort(t,...)
return t
end
+function table.values(t,s)
+ if t then
+ local values,keys,v={},{},0
+ for key,value in next,t do
+ if not keys[value] then
+ v=v+1
+ values[v]=value
+ keys[k]=key
+ end
+ end
+ if s then
+ sort(values)
+ end
+ return values
+ else
+ return {}
+ end
+end
end -- closure
@@ -1645,7 +1755,7 @@ local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
local concat=table.concat
local floor=math.floor
local type=type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator="\\",";"
else
io.fileseparator,io.pathseparator="/",":"
@@ -1662,6 +1772,7 @@ local function readall(f)
return f:read('*all')
else
local done=f:seek("set",0)
+ local step
if size<1024*1024 then
step=1024*1024
elseif size>16*1024*1024 then
@@ -2185,17 +2296,24 @@ end
function file.joinpath(tab,separator)
return tab and concat(tab,separator or io.pathseparator)
end
+local someslash=S("\\/")
local stripper=Cs(P(fwslash)^0/""*reslasher)
-local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon
local isroot=fwslash^1*-1
local hasroot=fwslash^1
+local reslasher=lpeg.replacer(S("\\/"),"/")
local deslasher=lpeg.replacer(S("\\/")^1,"/")
function file.join(...)
local lst={... }
local one=lst[1]
if lpegmatch(isnetwork,one) then
+ local one=lpegmatch(reslasher,one)
local two=lpegmatch(deslasher,concat(lst,"/",2))
- return one.."/"..two
+ if lpegmatch(hasroot,two) then
+ return one..two
+ else
+ return one.."/"..two
+ end
elseif lpegmatch(isroot,one) then
local two=lpegmatch(deslasher,concat(lst,"/",2))
if lpegmatch(hasroot,two) then
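With the reslasher applied first, network (UNC) prefixes now join cleanly regardless of slash style; a small illustration of the file.join change above (the path is hypothetical):

    local p = file.join("\\\\server\\share", "fonts", "lmr10.afm") -- "//server/share/fonts/lmr10.afm"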
@@ -2212,7 +2330,9 @@ end
local drivespec=R("az","AZ")^1*colon
local anchors=fwslash+drivespec
local untouched=periods+(1-period)^1*P(-1)
-local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0)
+local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//")
+local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
local absolute=fwslash
function file.collapsepath(str,anchor)
if not str then
@@ -2375,9 +2495,9 @@ function string.booleanstring(str)
end
function string.is_boolean(str,default)
if type(str)=="string" then
- if str=="true" or str=="yes" or str=="on" or str=="t" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
return true
- elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
return false
end
end
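The boolean parser now also accepts the numeric spellings; anything else presumably falls through to the supplied default (that part of the function lies outside this hunk). Illustrative:

    local a = string.is_boolean("1")            -- true
    local b = string.is_boolean("off")          -- false
    local c = string.is_boolean("maybe", true)  -- the default (true) is returned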
@@ -2437,15 +2557,28 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=_LUAVERSION<5.2 and load or function(str)
- return load(dump(load(str),true))
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
end
if not number then number={} end
local stripper=patterns.stripzeros
local function points(n)
+ n=tonumber(n)
return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n=tonumber(n)
return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
end
number.points=points
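Both converters now coerce their argument with tonumber, so string input (as it sometimes arrives from TeX) works too. Expected results, give or take the trailing-zero stripping:

    local a = number.points(65536)      -- "1pt"
    local b = number.points("32768")    -- "0.5pt"
    local c = number.basepoints(65536)  -- about "0.99626bp" (7200/7227 scaling)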
@@ -2508,11 +2641,39 @@ local pattern=Carg(1)/function(t)
function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
-function strings.striplong(str)
- str=gsub(str,"^%s*","")
- str=gsub(str,"[\n\r]+ *","\n")
- return str
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+}
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str
end
+strings.striplong=strings.striplines
function strings.nice(str)
str=gsub(str,"[:%-+_]+"," ")
return str
@@ -2563,29 +2724,83 @@ function number.signed(i)
return "-",-i
end
end
-local preamble=[[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-]]
+local zero=P("0")^1/""
+local plus=P("+")/""
+local minus=P("-")
+local separator=S(".")
+local digit=R("09")
+local trailing=zero^1*#S("eE")
+local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1))
+local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent)
+local pattern_b=Cs((exponent+P(1))^0)
+function number.sparseexponent(f,n)
+ if not n then
+ n=f
+ f="%e"
+ end
+ local tn=type(n)
+ if tn=="string" then
+ local m=tonumber(n)
+ if m then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m))
+ end
+ elseif tn=="number" then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n))
+ end
+ return tostring(n)
+end
local template=[[
%s
%s
return function(%s) return %s end
]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -2594,7 +2809,7 @@ setmetatable(arguments,{ __index=function(t,k)
end
})
local prefix_any=C((S("+- .")+R("09"))^0)
-local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0)
local format_s=function(f)
n=n+1
if f and f~="" then
@@ -2624,7 +2839,7 @@ local format_i=function(f)
if f and f~="" then
return format("format('%%%si',a%s)",f,n)
else
- return format("a%s",n)
+ return format("format('%%i',a%s)",n)
end
end
local format_d=format_i
@@ -2636,6 +2851,10 @@ local format_f=function(f)
n=n+1
return format("format('%%%sf',a%s)",f,n)
end
+local format_F=function(f)
+ n=n+1
+ return format("((a%s == 0 and '0') or (a%s == 1 and '1') or format('%%%sf',a%s))",n,n,f,n)
+end
local format_g=function(f)
n=n+1
return format("format('%%%sg',a%s)",f,n)
@@ -2652,6 +2871,14 @@ local format_E=function(f)
n=n+1
return format("format('%%%sE',a%s)",f,n)
end
+local format_j=function(f)
+ n=n+1
+ return format("sparseexponent('%%%se',a%s)",f,n)
+end
+local format_J=function(f)
+ n=n+1
+ return format("sparseexponent('%%%sE',a%s)",f,n)
+end
local format_x=function(f)
n=n+1
return format("format('%%%sx',a%s)",f,n)
@@ -2776,6 +3003,43 @@ end
local format_W=function(f)
return format("nspaces[%s]",tonumber(f) or 0)
end
+local digit=patterns.digit
+local period=patterns.period
+local three=digit*digit*digit
+local splitter=Cs (
+ (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2)
+)
+patterns.formattednumber=splitter
+function number.formatted(n,sep1,sep2)
+ local s=type(s)=="string" and n or format("%0.2f",n)
+ if sep1==true then
+ return lpegmatch(splitter,s,1,".",",")
+ elseif sep1=="." then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ",")
+ elseif sep1=="," then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ".")
+ else
+ return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
+ end
+end
+local format_m=function(f)
+ n=n+1
+ if not f or f=="" then
+ f=","
+ end
+ return format([[formattednumber(a%s,%q,".")]],n,f)
+end
+local format_M=function(f)
+ n=n+1
+ if not f or f=="" then
+ f="."
+ end
+ return format([[formattednumber(a%s,%q,",")]],n,f)
+end
+local format_z=function(f)
+ n=n+(tonumber(f) or 1)
+ return "''"
+end
local format_rest=function(s)
return format("%q",s)
end
@@ -2805,15 +3069,17 @@ local builder=Cs { "start",
(
P("%")/""*(
V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
+V("c")+V("C")+V("S")
+V("Q")
+V("N")
-+V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
-+V("w")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w")
+V("W")
+V("a")
-+V("A")
++V("A")
++V("j")+V("J")
++V("m")+V("M")
++V("z")
+V("*")
)+V("*")
)*(P(-1)+Carg(1))
@@ -2823,6 +3089,7 @@ local builder=Cs { "start",
["i"]=(prefix_any*P("i"))/format_i,
["d"]=(prefix_any*P("d"))/format_d,
["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
["g"]=(prefix_any*P("g"))/format_g,
["G"]=(prefix_any*P("G"))/format_G,
["e"]=(prefix_any*P("e"))/format_e,
@@ -2844,19 +3111,24 @@ local builder=Cs { "start",
["b"]=(prefix_any*P("b"))/format_b,
["t"]=(prefix_tab*P("t"))/format_t,
["T"]=(prefix_tab*P("T"))/format_T,
- ["l"]=(prefix_tab*P("l"))/format_l,
- ["L"]=(prefix_tab*P("L"))/format_L,
+ ["l"]=(prefix_any*P("l"))/format_l,
+ ["L"]=(prefix_any*P("L"))/format_L,
["I"]=(prefix_any*P("I"))/format_I,
["w"]=(prefix_any*P("w"))/format_w,
["W"]=(prefix_any*P("W"))/format_W,
+ ["j"]=(prefix_any*P("j"))/format_j,
+ ["J"]=(prefix_any*P("J"))/format_J,
+ ["m"]=(prefix_tab*P("m"))/format_m,
+ ["M"]=(prefix_tab*P("M"))/format_M,
+ ["z"]=(prefix_any*P("z"))/format_z,
["a"]=(prefix_any*P("a"))/format_a,
["A"]=(prefix_any*P("A"))/format_A,
- ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
- )
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
@@ -2865,10 +3137,10 @@ local function make(t,str)
f=loadstripped(p)()
else
n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p)()
+ f=loadstripped(p,t._environment_)()
else
f=function() return str end
end
@@ -2880,10 +3152,22 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-function strings.formatters.new()
- local t={ _extensions_={},_preamble_="",_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -2891,16 +3175,29 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if preamble then
+ if type(preamble)=="string" then
t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
end
end
end
strings.formatters.add=add
-lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
-lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
+patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
end -- closure
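Taken together, the formatter changes add several directives (%F, %j/%J, %m/%M, %z) and, on Lua 5.2, switch from a concatenated preamble to a load environment. Hedged examples of the new directives (exact separators and rounding depend on the input):

    local f = string.formatters
    print(f["%0.3F"](0))        -- "0"             (%F short-circuits exact 0 and 1)
    print(f["%m"](1234567.89))  -- "1,234,567.89"  (thousands separators)
    print(f["%j"](10000))       -- "1e4"           (sparse exponent notation)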
@@ -2979,6 +3276,7 @@ local remapper={
fea="font feature files",
pfa="type1 fonts",
pfb="type1 fonts",
+ afm="afm",
}
function resolvers.findfile(name,fileformat)
name=string.gsub(name,"\\","/")
@@ -2997,6 +3295,10 @@ function resolvers.findfile(name,fileformat)
return found
end
resolvers.findbinfile=resolvers.findfile
+function resolvers.loadbinfile(filename,filetype)
+ local data=io.loaddata(filename)
+ return true,data,#data
+end
function resolvers.resolve(s)
return s
end
@@ -3012,13 +3314,22 @@ if not caches.namespace or caches.namespace=="" or caches.namespace=="context" t
end
do
local cachepaths=kpse.expand_var('$TEXMFCACHE') or ""
- if cachepaths=="" then
+ if cachepaths=="" or cachepaths=="$TEXMFCACHE" then
cachepaths=kpse.expand_var('$TEXMFVAR') or ""
end
- if cachepaths=="" then
+ if cachepaths=="" or cachepaths=="$TEXMFVAR" then
cachepaths=kpse.expand_var('$VARTEXMF') or ""
end
if cachepaths=="" then
+ local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+ for i=1,#fallbacks do
+ cachepaths=os.getenv(fallbacks[i]) or ""
+   if cachepaths~="" and lfs.isdir(cachepaths) then
+ break
+ end
+ end
+ end
+ if cachepaths=="" then
cachepaths="."
end
cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":")
@@ -3083,6 +3394,17 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data=false
local luaname,lucname=makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then
texio.write(string.format("(load luc: %s)",lucname))
data=loadfile(lucname)
@@ -3111,7 +3433,7 @@ function caches.savedata(path,name,data)
local luaname,lucname=makefullname(path,name)
if luaname then
texio.write(string.format("(save: %s)",luaname))
- table.tofile(luaname,data,true,{ reduce=true })
+ table.tofile(luaname,data,true)
if lucname and type(caches.compile)=="function" then
os.remove(lucname)
texio.write(string.format("(save: %s)",lucname))
@@ -3296,17 +3618,27 @@ nodes.handlers={}
local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" }
nodes.nodecodes=nodecodes
nodes.whatcodes=whatcodes
nodes.whatsitcodes=whatcodes
nodes.glyphcodes=glyphcodes
+nodes.disccodes=disccodes
local free_node=node.free
local remove_node=node.remove
local new_node=node.new
local traverse_id=node.traverse_id
-local math_code=nodecodes.math
nodes.handlers.protectglyphs=node.protect_glyphs
nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+local math_code=nodecodes.math
+local end_of_math=node.end_of_math
+function node.end_of_math(n)
+ if n.id==math_code and n.subtype==1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
function nodes.remove(head,current,free_too)
local t=current
head,current=remove_node(head,current)
@@ -3323,18 +3655,63 @@ end
function nodes.delete(head,current)
return nodes.remove(head,current,true)
end
-nodes.before=node.insert_before
-nodes.after=node.insert_after
function nodes.pool.kern(k)
local n=new_node("kern",1)
n.kern=k
return n
end
-function nodes.endofmath(n)
- for n in traverse_id(math_code,n.next) do
- return n
- end
-end
+local getfield=node.getfield or function(n,tag) return n[tag] end
+local setfield=node.setfield or function(n,tag,value) n[tag]=value end
+nodes.getfield=getfield
+nodes.setfield=setfield
+nodes.getattr=getfield
+nodes.setattr=setfield
+if node.getid then nodes.getid=node.getid else function nodes.getid (n) return getfield(n,"id") end end
+if node.getsubtype then nodes.getsubtype=node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
+if node.getnext then nodes.getnext=node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
+if node.getprev then nodes.getprev=node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
+if node.getchar then nodes.getchar=node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
+if node.getfont then nodes.getfont=node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
+if node.getlist then nodes.getlist=node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
+function nodes.tonut (n) return n end
+function nodes.tonode(n) return n end
+nodes.tostring=node.tostring or tostring
+nodes.copy=node.copy
+nodes.copy_list=node.copy_list
+nodes.delete=node.delete
+nodes.dimensions=node.dimensions
+nodes.end_of_math=node.end_of_math
+nodes.flush_list=node.flush_list
+nodes.flush_node=node.flush_node
+nodes.free=node.free
+nodes.insert_after=node.insert_after
+nodes.insert_before=node.insert_before
+nodes.hpack=node.hpack
+nodes.new=node.new
+nodes.tail=node.tail
+nodes.traverse=node.traverse
+nodes.traverse_id=node.traverse_id
+nodes.slide=node.slide
+nodes.vpack=node.vpack
+nodes.first_glyph=node.first_glyph
+nodes.first_character=node.first_character
+nodes.has_glyph=node.has_glyph or node.first_glyph
+nodes.current_attr=node.current_attr
+nodes.do_ligature_n=node.do_ligature_n
+nodes.has_field=node.has_field
+nodes.last_node=node.last_node
+nodes.usedlist=node.usedlist
+nodes.protrusion_skippable=node.protrusion_skippable
+nodes.write=node.write
+nodes.has_attribute=node.has_attribute
+nodes.set_attribute=node.set_attribute
+nodes.unset_attribute=node.unset_attribute
+nodes.protect_glyphs=node.protect_glyphs
+nodes.unprotect_glyphs=node.unprotect_glyphs
+nodes.kerning=node.kerning
+nodes.ligaturing=node.ligaturing
+nodes.mlist_to_hlist=node.mlist_to_hlist
+nodes.nuts=nodes
end -- closure
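Editorial sketch (not from the patch): the getfield/setfield shims defined in this closure keep one call shape whether or not the engine exposes the direct node accessors.

    local g = nodes.new("glyph")       -- nodes.new is node.new (aliased above)
    nodes.setfield(g, "char", 0x41)    -- falls back to g.char = 0x41 on older binaries
    print(nodes.getfield(g, "char"))   -- 65
    nodes.free(g)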
@@ -3551,6 +3928,34 @@ function constructors.beforecopyingcharacters(target,original)
end
function constructors.aftercopyingcharacters(target,original)
end
+constructors.sharefonts=false
+constructors.nofsharedfonts=0
+local sharednames={}
+function constructors.trytosharefont(target,tfmdata)
+ if constructors.sharefonts then
+ local characters=target.characters
+ local n=1
+ local t={ target.psname }
+ local u=sortedkeys(characters)
+ for i=1,#u do
+ local k=u[i]
+ n=n+1;t[n]=k
+ n=n+1;t[n]=characters[k].index or k
+ end
+ local h=md5.HEX(concat(t," "))
+ local s=sharednames[h]
+ if s then
+ if trace_defining then
+ report_defining("font %a uses backend resources of font %a",target.fullname,s)
+ end
+ target.fullname=s
+ constructors.nofsharedfonts=constructors.nofsharedfonts+1
+ target.properties.sharedwith=s
+ else
+ sharednames[h]=target.fullname
+ end
+ end
+end
function constructors.enhanceparameters(parameters)
local xheight=parameters.x_height
local quad=parameters.quad
@@ -3578,6 +3983,7 @@ function constructors.scale(tfmdata,specification)
if tonumber(specification) then
specification={ size=specification }
end
+ target.specification=specification
local scaledpoints=specification.size
local relativeid=specification.relativeid
local properties=tfmdata.properties or {}
@@ -3629,7 +4035,7 @@ function constructors.scale(tfmdata,specification)
targetproperties.script=properties.script or "dflt"
targetproperties.mode=properties.mode or "base"
local askedscaledpoints=scaledpoints
- local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints)
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification)
local hdelta=delta
local vdelta=delta
target.designsize=parameters.designsize
@@ -3703,7 +4109,7 @@ function constructors.scale(tfmdata,specification)
end
target.type=isvirtual and "virtual" or "real"
target.postprocessors=tfmdata.postprocessors
- local targetslant=(parameters.slant or parameters[1] or 0)
+ local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt
local targetspace=(parameters.space or parameters[2] or 0)*hdelta
local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
@@ -3982,6 +4388,7 @@ function constructors.scale(tfmdata,specification)
targetcharacters[unicode]=chr
end
constructors.aftercopyingcharacters(target,tfmdata)
+ constructors.trytosharefont(target,tfmdata)
return target
end
function constructors.finalize(tfmdata)
@@ -4021,7 +4428,7 @@ function constructors.finalize(tfmdata)
parameters.slantfactor=tfmdata.slant or 0
end
if not parameters.designsize then
- parameters.designsize=tfmdata.designsize or 655360
+ parameters.designsize=tfmdata.designsize or (factors.pt*10)
end
if not parameters.units then
parameters.units=tfmdata.units_per_em or 1000
@@ -4145,11 +4552,11 @@ function constructors.hashinstance(specification,force)
size=math.round(constructors.scaled(size,designsizes[hash]))
specification.size=size
end
- if fallbacks then
- return hash..' @ '..tostring(size)..' @ '..fallbacks
- else
- return hash..' @ '..tostring(size)
- end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
+ end
end
function constructors.setname(tfmdata,specification)
if constructors.namemode=="specification" then
@@ -4383,7 +4790,8 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report)
local whathandler=handlers[what]
local whatfeatures=whathandler.features
local whatprocessors=whatfeatures.processors
- local processors=whatprocessors[properties.mode]
+ local mode=properties.mode
+ local processors=whatprocessors[mode]
if processors then
for i=1,#processors do
local step=processors[i]
@@ -4400,7 +4808,7 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report)
end
end
elseif trace then
- report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname)
+ report("no feature processors for mode %a for font %a",mode,properties.fullname)
end
end
return processes
@@ -4411,7 +4819,8 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
local whathandler=handlers[what]
local whatfeatures=whathandler.features
local whatmanipulators=whatfeatures.manipulators
- local manipulators=whatmanipulators[properties.mode]
+ local mode=properties.mode
+ local manipulators=whatmanipulators[mode]
if manipulators then
for i=1,#manipulators do
local step=manipulators[i]
@@ -4420,7 +4829,7 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
if value then
local action=step.action
if trace then
- report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname)
end
if action then
action(tfmdata,feature,value)
@@ -4780,33 +5189,38 @@ function mappings.addtounicode(data,filename)
if not unicode or unicode=="" then
local split=lpegmatch(namesplitter,name)
local nsplit=split and #split or 0
- if nsplit>=2 then
- local t,n={},0
- for l=1,nsplit do
- local base=split[l]
- local u=unicodes[base] or unicodevector[base]
- if not u then
+ local t,n={},0
+ unicode=true
+ for l=1,nsplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ if u[1]>=private then
+ unicode=false
break
- elseif type(u)=="table" then
- n=n+1
- t[n]=u[1]
- else
- n=n+1
- t[n]=u
end
- end
- if n==0 then
- elseif n==1 then
- originals[index]=t[1]
- tounicode[index]=tounicode16(t[1],name)
+ n=n+1
+ t[n]=u[1]
else
- originals[index]=t
- tounicode[index]=tounicode16sequence(t)
+ if u>=private then
+ unicode=false
+ break
+ end
+ n=n+1
+ t[n]=u
end
- nl=nl+1
- unicode=true
+ end
+ if n==0 then
+ elseif n==1 then
+ originals[index]=t[1]
+ tounicode[index]=tounicode16(t[1],name)
else
+ originals[index]=t
+ tounicode[index]=tounicode16sequence(t)
end
+ nl=nl+1
end
if not unicode or unicode=="" then
local foundcodes,multiple=lpegmatch(uparser,name)
@@ -4917,6 +5331,1103 @@ fonts.names.resolvespec=fonts.names.resolve
function fonts.names.getfilename(askedname,suffix)
return ""
end
+function fonts.names.ignoredfile(filename)
+ return false
+end
+
+end -- closure
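Editorial note: in this generic loader the names database is stubbed out, so the helpers above report nothing; a hedged illustration (the asked name is only an example):

    print(fonts.names.getfilename("Latin Modern Roman","otf"))  -- "" (no database)
    print(fonts.names.ignoredfile("foo.afm"))                   -- false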
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-tfm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next=next
+local match=string.match
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end)
+local report_defining=logs.reporter("fonts","defining")
+local report_tfm=logs.reporter("fonts","tfm loading")
+local findbinfile=resolvers.findbinfile
+local fonts=fonts
+local handlers=fonts.handlers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local encodings=fonts.encodings
+local tfm=constructors.newhandler("tfm")
+local tfmfeatures=constructors.newfeatures("tfm")
+local registertfmfeature=tfmfeatures.register
+constructors.resolvevirtualtoo=false
+fonts.formats.tfm="type1"
+function tfm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
+ if okay then
+ return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
+ else
+ return {}
+ end
+end
+local function read_from_tfm(specification)
+ local filename=specification.filename
+ local size=specification.size
+ if trace_defining then
+ report_defining("loading tfm file %a at size %s",filename,size)
+ end
+ local tfmdata=font.read_tfm(filename,size)
+ if tfmdata then
+ local features=specification.features and specification.features.normal or {}
+ local resources=tfmdata.resources or {}
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ local shared=tfmdata.shared or {}
+ properties.name=tfmdata.name
+ properties.fontname=tfmdata.fontname
+ properties.psname=tfmdata.psname
+ properties.filename=specification.filename
+ parameters.size=size
+ shared.rawdata={}
+ shared.features=features
+ shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
+ tfmdata.properties=properties
+ tfmdata.resources=resources
+ tfmdata.parameters=parameters
+ tfmdata.shared=shared
+ parameters.slant=parameters.slant or parameters[1] or 0
+ parameters.space=parameters.space or parameters[2] or 0
+ parameters.space_stretch=parameters.space_stretch or parameters[3] or 0
+ parameters.space_shrink=parameters.space_shrink or parameters[4] or 0
+ parameters.x_height=parameters.x_height or parameters[5] or 0
+ parameters.quad=parameters.quad or parameters[6] or 0
+ parameters.extra_space=parameters.extra_space or parameters[7] or 0
+ constructors.enhanceparameters(parameters)
+ if constructors.resolvevirtualtoo then
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification)
+ local vfname=findbinfile(specification.name,'ovf')
+ if vfname and vfname~="" then
+ local vfdata=font.read_vf(vfname,size)
+ if vfdata then
+ local chars=tfmdata.characters
+ for k,v in next,vfdata.characters do
+ chars[k].commands=v.commands
+ end
+ properties.virtualized=true
+ tfmdata.fonts=vfdata.fonts
+ end
+ end
+ end
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
+ if not features.encoding then
+ local encoding,filename=match(properties.filename,"^(.-)%-(.*)$")
+ if filename and encoding and encodings.known and encodings.known[encoding] then
+ features.encoding=encoding
+ end
+ end
+ return tfmdata
+ end
+end
+local function check_tfm(specification,fullname)
+ local foundname=findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=findbinfile(fullname,'ofm') or ""
+ end
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"tfm") or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return read_from_tfm(specification)
+ elseif trace_defining then
+ report_defining("loading tfm with name %a fails",specification.name)
+ end
+end
+readers.check_tfm=check_tfm
+function readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ return check_tfm(specification,fullname)
+end
+
+end -- closure
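Editorial sketch of what the tfm reader above expects (hypothetical values; in practice the definers build the specification table):

    local spec = {
      name     = "cmr10",          -- illustrative font name
      size     = 10 * 65536,       -- 10pt in scaled points
      features = { normal = {} },
    }
    local tfmdata = fonts.readers.tfm(spec)   -- finds cmr10.tfm and loads it at that size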
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers
+local next,type,tonumber=next,type,tonumber
+local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip
+local abs=math.abs
+local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns
+local derivetable=table.derive
+local trace_features=false trackers.register("afm.features",function(v) trace_features=v end)
+local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end)
+local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end)
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local report_afm=logs.reporter("fonts","afm loading")
+local findbinfile=resolvers.findbinfile
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local afm=constructors.newhandler("afm")
+local pfb=constructors.newhandler("pfb")
+local afmfeatures=constructors.newfeatures("afm")
+local registerafmfeature=afmfeatures.register
+afm.version=1.410
+afm.cache=containers.define("fonts","afm",afm.version,true)
+afm.autoprefixed=true
+afm.helpdata={}
+afm.syncspace=true
+afm.addligatures=true
+afm.addtexligatures=true
+afm.addkerns=true
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+registerafmfeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+local comment=P("Comment")
+local spacing=patterns.spacer
+local lineend=patterns.newline
+local words=C((1-lineend)^1)
+local number=C((R("09")+S("."))^1)/tonumber*spacing^0
+local data=lpeg.Carg(1)
+local pattern=(
+ comment*spacing*(
+ data*(
+ ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end
+ )+(1-lineend)^0
+ )+(1-comment)^1
+)^0
+local function scan_comment(str)
+ local fd={}
+ lpegmatch(pattern,str,1,fd)
+ return fd
+end
+local keys={}
+function keys.FontName (data,line) data.metadata.fontname=strip (line)
+ data.metadata.fullname=strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender=tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end
+function keys.Comment (data,line)
+ line=lower(line)
+ local designsize=match(line,"designsize[^%d]*(%d+)")
+ if designsize then data.metadata.designsize=tonumber(designsize) end
+end
+local function get_charmetrics(data,charmetrics,vector)
+ local characters=data.characters
+ local chr,ind={},0
+ for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
+ if k=='C' then
+ v=tonumber(v)
+ if v<0 then
+ ind=ind+1
+ else
+ ind=v
+ end
+ chr={
+ index=ind
+ }
+ elseif k=='WX' then
+ chr.width=tonumber(v)
+ elseif k=='N' then
+ characters[v]=chr
+ elseif k=='B' then
+ local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
+ chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) }
+ elseif k=='L' then
+ local plus,becomes=match(v,"^(.-) +(.-)$")
+ local ligatures=chr.ligatures
+ if ligatures then
+ ligatures[plus]=becomes
+ else
+ chr.ligatures={ [plus]=becomes }
+ end
+ end
+ end
+end
+local function get_kernpairs(data,kernpairs)
+ local characters=data.characters
+ for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
+ local chr=characters[one]
+ if chr then
+ local kerns=chr.kerns
+ if kerns then
+ kerns[two]=tonumber(value)
+ else
+ chr.kerns={ [two]=tonumber(value) }
+ end
+ end
+ end
+end
+local function get_variables(data,fontmetrics)
+ for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
+ local keyhandler=keys[key]
+ if keyhandler then
+ keyhandler(data,rest)
+ end
+ end
+end
+local function get_indexes(data,pfbname)
+ data.resources.filename=resolvers.unresolve(pfbname)
+ local pfbblob=fontloader.open(pfbname)
+ if pfbblob then
+ local characters=data.characters
+ local pfbdata=fontloader.to_table(pfbblob)
+ if pfbdata then
+ local glyphs=pfbdata.glyphs
+ if glyphs then
+ if trace_loading then
+ report_afm("getting index data from %a",pfbname)
+ end
+ for index,glyph in next,glyphs do
+ local name=glyph.name
+ if name then
+ local char=characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %a has index %a",name,index)
+ end
+ char.index=index
+ end
+ end
+ end
+ elseif trace_loading then
+ report_afm("no glyph data in pfb file %a",pfbname)
+ end
+ elseif trace_loading then
+ report_afm("no data in pfb file %a",pfbname)
+ end
+ fontloader.close(pfbblob)
+ elseif trace_loading then
+ report_afm("invalid pfb file %a",pfbname)
+ end
+end
+local function readafm(filename)
+ local ok,afmblob,size=resolvers.loadbinfile(filename)
+ if ok and afmblob then
+ local data={
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=afm.version,
+ creator="context mkiv",
+ },
+ properties={
+ hasitalics=false,
+ },
+ goodies={},
+ metadata={
+ filename=file.removesuffix(file.basename(filename))
+ },
+ characters={
+ },
+ descriptions={
+ },
+ }
+ afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics)
+ if trace_loading then
+ report_afm("loading char metrics")
+ end
+ get_charmetrics(data,charmetrics,vector)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs)
+ if trace_loading then
+ report_afm("loading kern pairs")
+ end
+ get_kernpairs(data,kernpairs)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics)
+ if trace_loading then
+ report_afm("loading variables")
+ end
+ data.afmversion=version
+ get_variables(data,fontmetrics)
+ data.fontdimens=scan_comment(fontmetrics)
+ return ""
+ end)
+ return data
+ else
+ if trace_loading then
+ report_afm("no valid afm file %a",filename)
+ end
+ return nil
+ end
+end
+local addkerns,addligatures,addtexligatures,unify,normalize
+function afm.load(filename)
+ filename=resolvers.findfile(filename,'afm') or ""
+ if filename~="" and not fonts.names.ignoredfile(filename) then
+ local name=file.removesuffix(file.basename(filename))
+ local data=containers.read(afm.cache,name)
+ local attr=lfs.attributes(filename)
+ local size,time=attr.size or 0,attr.modification or 0
+ local pfbfile=file.replacesuffix(name,"pfb")
+ local pfbname=resolvers.findfile(pfbfile,"pfb") or ""
+ if pfbname=="" then
+ pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize,pfbtime=0,0
+ if pfbname~="" then
+ local attr=lfs.attributes(pfbname)
+ pfbsize=attr.size or 0
+ pfbtime=attr.modification or 0
+ end
+ if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then
+ report_afm("reading %a",filename)
+ data=readafm(filename)
+ if data then
+ if pfbname~="" then
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %a",filename)
+ end
+ report_afm("unifying %a",filename)
+ unify(data,filename)
+ if afm.addligatures then
+ report_afm("add ligatures")
+ addligatures(data)
+ end
+ if afm.addtexligatures then
+ report_afm("add tex ligatures")
+ addtexligatures(data)
+ end
+ if afm.addkerns then
+ report_afm("add extra kerns")
+ addkerns(data)
+ end
+ normalize(data)
+ report_afm("add tounicode data")
+ fonts.mappings.addtounicode(data,filename)
+ data.size=size
+ data.time=time
+ data.pfbsize=pfbsize
+ data.pfbtime=pfbtime
+ report_afm("saving %a in cache",name)
+ data=containers.write(afm.cache,name,data)
+ data=containers.read(afm.cache,name)
+ end
+ if applyruntimefixes and data then
+ applyruntimefixes(filename,data)
+ end
+ end
+ return data
+ else
+ return nil
+ end
+end
+local uparser=fonts.mappings.makenameparser()
+unify=function(data,filename)
+ local unicodevector=fonts.encodings.agl.unicodes
+ local unicodes,names={},{}
+ local private=constructors.privateoffset
+ local descriptions=data.descriptions
+ for name,blob in next,data.characters do
+ local code=unicodevector[name]
+ if not code then
+ code=lpegmatch(uparser,name)
+ if not code then
+ code=private
+ private=private+1
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
+ end
+ end
+ local index=blob.index
+ unicodes[name]=code
+ names[name]=index
+ blob.name=name
+ descriptions[code]={
+ boundingbox=blob.boundingbox,
+ width=blob.width,
+ kerns=blob.kerns,
+ index=index,
+ name=name,
+ }
+ end
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local krn={}
+ for name,kern in next,kerns do
+ local unicode=unicodes[name]
+ if unicode then
+ krn[unicode]=kern
+ else
+ end
+ end
+ description.kerns=krn
+ end
+ end
+ data.characters=nil
+ local resources=data.resources
+ local filename=resources.filename or file.removesuffix(file.basename(filename))
+ resources.filename=resolvers.unresolve(filename)
+ resources.unicodes=unicodes
+ resources.marks={}
+ resources.names=names
+ resources.private=private
+end
+normalize=function(data)
+end
+local addthem=function(rawdata,ligatures)
+ if ligatures then
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local names=resources.names
+ for ligname,ligdata in next,ligatures do
+ local one=descriptions[unicodes[ligname]]
+ if one then
+ for _,pair in next,ligdata do
+ local two,three=unicodes[pair[1]],unicodes[pair[2]]
+ if two and three then
+ local ol=one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two]=three
+ end
+ else
+ one.ligatures={ [two]=three }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
+addkerns=function(rawdata)
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local function do_it_left(what)
+ if what then
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local extrakerns
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local ks=kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex]=ks
+ else
+ extrakerns={ [complex]=ks }
+ end
+ end
+ end
+ end
+ if extrakerns then
+ description.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ local function do_it_copy(what)
+ if what then
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local complexdescription=descriptions[complex]
+ if complexdescription then
+     local simpledescription=descriptions[simple]
+ if simpledescription then
+ local extrakerns
+ local kerns=simpledescription.kerns
+ if kerns then
+ for unicode,kern in next,kerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+      local morekerns=simpledescription.extrakerns
+      if morekerns then
+       for unicode,kern in next,morekerns do
+        if extrakerns then
+         extrakerns[unicode]=kern
+        else
+         extrakerns={ [unicode]=kern }
+        end
+       end
+      end
+ if extrakerns then
+ complexdescription.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
+end
+local function adddimensions(data)
+ if data then
+ for unicode,description in next,data.descriptions do
+ local bb=description.boundingbox
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ description.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ description.depth=dp
+ end
+ end
+ end
+ end
+end
+local function copytotfm(data)
+ if data and data.descriptions then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local unicodes=resources.unicodes
+ for unicode,description in next,data.descriptions do
+ characters[unicode]={}
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname or metadata.fullname
+ local fullname=metadata.fullname or metadata.fontname
+ local endash=unicodes['space']
+ local emdash=unicodes['emdash']
+ local spacer="space"
+ local spaceunits=500
+ local monospaced=metadata.isfixedpitch
+ local charwidth=metadata.charwidth
+ local italicangle=metadata.italicangle
+ local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ if properties.monospaced then
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits)
+ if spaceunits<200 then
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=500
+ parameters.space_shrink=333
+ parameters.x_height=400
+ parameters.quad=1000
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif afm.syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=unicodes['x']
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ local fd=data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then
+ for k,v in next,fd do
+ parameters[k]=v
+ end
+ end
+ parameters.designsize=(metadata.designsize or 10)*65536
+ parameters.ascender=abs(metadata.ascender or 0)
+ parameters.descender=abs(metadata.descender or 0)
+ parameters.units=1000
+ properties.spacer=spacer
+ properties.encodingbytes=2
+ properties.format=fonts.formats[filename] or "type1"
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fullname
+ properties.name=filename or fullname or fontname
+ if next(characters) then
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+ end
+ return nil
+end
+function afm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
+ if okay then
+ return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
+ else
+ return {}
+ end
+end
+local function checkfeatures(specification)
+end
+local function afmtotfm(specification)
+ local afmname=specification.filename or specification.name
+ if specification.forced=="afm" or specification.format=="afm" then
+ if trace_loading then
+ report_afm("forcing afm format for %a",afmname)
+ end
+ else
+ local tfmname=findbinfile(afmname,"ofm") or ""
+ if tfmname~="" then
+ if trace_loading then
+ report_afm("fallback from afm to tfm for %a",afmname)
+ end
+ return
+ end
+ end
+ if afmname~="" then
+ local features=constructors.checkedfeatures("afm",specification.features.normal)
+ specification.features.normal=features
+ constructors.hashinstance(specification,true)
+ specification=definers.resolve(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local rawdata=afm.load(afmname)
+ if rawdata and next(rawdata) then
+ adddimensions(rawdata)
+ tfmdata=copytotfm(rawdata)
+ if tfmdata and next(tfmdata) then
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.features=features
+ shared.processes=afm.setfeatures(tfmdata,features)
+ end
+ elseif trace_loading then
+ report_afm("no (valid) afm file found with name %a",afmname)
+ end
+ tfmdata=containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+ end
+end
+local function read_from_afm(specification)
+ local tfmdata=afmtotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+ fonts.loggers.register(tfmdata,'afm',specification)
+ end
+ return tfmdata
+end
+local function prepareligatures(tfmdata,ligatures,value)
+ if value then
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,tfmdata.characters do
+ local description=descriptions[unicode]
+ local dligatures=description.ligatures
+ if dligatures then
+ local cligatures=character.ligatures
+ if not cligatures then
+ cligatures={}
+ character.ligatures=cligatures
+ end
+ for unicode,ligature in next,dligatures do
+ cligatures[unicode]={
+ char=ligature,
+ type=0
+ }
+ end
+ end
+ end
+ end
+end
+local function preparekerns(tfmdata,kerns,value)
+ if value then
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local descriptions=tfmdata.descriptions
+ for u,chr in next,tfmdata.characters do
+ local d=descriptions[u]
+ local newkerns=d[kerns]
+ if newkerns then
+ local kerns=chr.kerns
+ if not kerns then
+ kerns={}
+ chr.kerns=kerns
+ end
+ for k,v in next,newkerns do
+ local uk=unicodes[k]
+ if uk then
+ kerns[uk]=v
+ end
+ end
+ end
+ end
+ end
+end
+local list={
+ [0x0027]=0x2019,
+}
+local function texreplacements(tfmdata,value)
+ local descriptions=tfmdata.descriptions
+ local characters=tfmdata.characters
+ for k,v in next,list do
+ characters [k]=characters [v]
+ descriptions[k]=descriptions[v]
+ end
+end
+local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end
+local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
+local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end
+local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end
+registerafmfeature {
+ name="liga",
+ description="traditional ligatures",
+ initializers={
+ base=ligatures,
+ node=ligatures,
+ }
+}
+registerafmfeature {
+ name="kern",
+ description="intercharacter kerning",
+ initializers={
+ base=kerns,
+ node=kerns,
+ }
+}
+registerafmfeature {
+ name="extrakerns",
+ description="additional intercharacter kerning",
+ initializers={
+ base=extrakerns,
+ node=extrakerns,
+ }
+}
+registerafmfeature {
+ name='tlig',
+ description='tex ligatures',
+ initializers={
+ base=texligatures,
+ node=texligatures,
+ }
+}
+registerafmfeature {
+ name='trep',
+ description='tex replacements',
+ initializers={
+ base=texreplacements,
+ node=texreplacements,
+ }
+}
+local check_tfm=readers.check_tfm
+fonts.formats.afm="type1"
+fonts.formats.pfb="type1"
+local function check_afm(specification,fullname)
+ local foundname=findbinfile(fullname,'afm') or ""
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"afm") or ""
+ end
+ if foundname=="" and afm.autoprefixed then
+ local encoding,shortname=match(fullname,"^(.-)%-(.*)$")
+ if encoding and shortname and fonts.encodings.known[encoding] then
+ shortname=findbinfile(shortname,'afm') or ""
+ if shortname~="" then
+ foundname=shortname
+ if trace_defining then
+     report_afm("stripping encoding prefix from filename %a",fullname)
+ end
+ end
+ end
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="afm"
+ return read_from_afm(specification)
+ end
+end
+function readers.afm(specification,method)
+ local fullname,tfmdata=specification.filename or "",nil
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ tfmdata=check_afm(specification,specification.name.."."..forced)
+ end
+ if not tfmdata then
+ method=method or definers.method or "afm or tfm"
+ if method=="tfm" then
+ tfmdata=check_tfm(specification,specification.name)
+ elseif method=="afm" then
+ tfmdata=check_afm(specification,specification.name)
+ elseif method=="tfm or afm" then
+ tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name)
+ else
+ tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name)
+ end
+ end
+ else
+ tfmdata=check_afm(specification,fullname)
+ end
+ return tfmdata
+end
+function readers.pfb(specification,method)
+ local original=specification.specification
+ if trace_defining then
+ report_afm("using afm reader for %a",original)
+ end
+ specification.specification=gsub(original,"%.pfb",".afm")
+ specification.forced="afm"
+ return readers.afm(specification,method)
+end
+
+end -- closure
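Editorial sketch of calling the afm loader above directly ("lmr10.afm" is only an illustrative name; kpse must be able to find the afm and, ideally, its pfb):

    local rawdata = fonts.handlers.afm.load("lmr10.afm")   -- parsed, unified and cached
    if rawdata then
      print(rawdata.metadata.fontname, rawdata.resources.private)
    end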
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afk']={
+ version=1.001,
+ comment="companion to font-afm.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+ dataonly=true,
+}
+local allocate=utilities.storage.allocate
+fonts.handlers.afm.helpdata={
+ ligatures=allocate {
+ ['f']={
+ { 'f','ff' },
+ { 'i','fi' },
+ { 'l','fl' },
+ },
+ ['ff']={
+ { 'i','ffi' }
+ },
+ ['fi']={
+ { 'i','fii' }
+ },
+ ['fl']={
+ { 'i','fli' }
+ },
+ ['s']={
+ { 't','st' }
+ },
+ ['i']={
+ { 'j','ij' }
+ },
+ },
+ texligatures=allocate {
+ ['quoteleft']={
+ { 'quoteleft','quotedblleft' }
+ },
+ ['quoteright']={
+ { 'quoteright','quotedblright' }
+ },
+ ['hyphen']={
+ { 'hyphen','endash' }
+ },
+ ['endash']={
+ { 'hyphen','emdash' }
+ }
+ },
+ leftkerned=allocate {
+ AEligature="A",aeligature="a",
+ OEligature="O",oeligature="o",
+ IJligature="I",ijligature="i",
+ AE="A",ae="a",
+ OE="O",oe="o",
+ IJ="I",ij="i",
+ Ssharp="S",ssharp="s",
+ },
+ rightkerned=allocate {
+ AEligature="E",aeligature="e",
+ OEligature="E",oeligature="e",
+ IJligature="J",ijligature="j",
+ AE="E",ae="e",
+ OE="E",oe="e",
+ IJ="J",ij="j",
+ Ssharp="S",ssharp="s",
+ },
+ bothkerned=allocate {
+ Acircumflex="A",acircumflex="a",
+ Ccircumflex="C",ccircumflex="c",
+ Ecircumflex="E",ecircumflex="e",
+ Gcircumflex="G",gcircumflex="g",
+ Hcircumflex="H",hcircumflex="h",
+ Icircumflex="I",icircumflex="i",
+ Jcircumflex="J",jcircumflex="j",
+ Ocircumflex="O",ocircumflex="o",
+ Scircumflex="S",scircumflex="s",
+ Ucircumflex="U",ucircumflex="u",
+ Wcircumflex="W",wcircumflex="w",
+ Ycircumflex="Y",ycircumflex="y",
+ Agrave="A",agrave="a",
+ Egrave="E",egrave="e",
+ Igrave="I",igrave="i",
+ Ograve="O",ograve="o",
+ Ugrave="U",ugrave="u",
+ Ygrave="Y",ygrave="y",
+ Atilde="A",atilde="a",
+ Itilde="I",itilde="i",
+ Otilde="O",otilde="o",
+ Utilde="U",utilde="u",
+ Ntilde="N",ntilde="n",
+ Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a",
+ Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e",
+ Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i",
+ Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o",
+ Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u",
+ Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y",
+ Aacute="A",aacute="a",
+ Cacute="C",cacute="c",
+ Eacute="E",eacute="e",
+ Iacute="I",iacute="i",
+ Lacute="L",lacute="l",
+ Nacute="N",nacute="n",
+ Oacute="O",oacute="o",
+ Racute="R",racute="r",
+ Sacute="S",sacute="s",
+ Uacute="U",uacute="u",
+ Yacute="Y",yacute="y",
+ Zacute="Z",zacute="z",
+ Dstroke="D",dstroke="d",
+ Hstroke="H",hstroke="h",
+ Tstroke="T",tstroke="t",
+ Cdotaccent="C",cdotaccent="c",
+ Edotaccent="E",edotaccent="e",
+ Gdotaccent="G",gdotaccent="g",
+ Idotaccent="I",idotaccent="i",
+ Zdotaccent="Z",zdotaccent="z",
+ Amacron="A",amacron="a",
+ Emacron="E",emacron="e",
+ Imacron="I",imacron="i",
+ Omacron="O",omacron="o",
+ Umacron="U",umacron="u",
+ Ccedilla="C",ccedilla="c",
+ Kcedilla="K",kcedilla="k",
+ Lcedilla="L",lcedilla="l",
+ Ncedilla="N",ncedilla="n",
+ Rcedilla="R",rcedilla="r",
+ Scedilla="S",scedilla="s",
+ Tcedilla="T",tcedilla="t",
+ Ohungarumlaut="O",ohungarumlaut="o",
+ Uhungarumlaut="U",uhungarumlaut="u",
+ Aogonek="A",aogonek="a",
+ Eogonek="E",eogonek="e",
+ Iogonek="I",iogonek="i",
+ Uogonek="U",uogonek="u",
+ Aring="A",aring="a",
+ Uring="U",uring="u",
+ Abreve="A",abreve="a",
+ Ebreve="E",ebreve="e",
+ Gbreve="G",gbreve="g",
+ Ibreve="I",ibreve="i",
+ Obreve="O",obreve="o",
+ Ubreve="U",ubreve="u",
+ Ccaron="C",ccaron="c",
+ Dcaron="D",dcaron="d",
+ Ecaron="E",ecaron="e",
+ Lcaron="L",lcaron="l",
+ Ncaron="N",ncaron="n",
+ Rcaron="R",rcaron="r",
+ Scaron="S",scaron="s",
+ Tcaron="T",tcaron="t",
+ Zcaron="Z",zcaron="z",
+ dotlessI="I",dotlessi="i",
+ dotlessJ="J",dotlessj="j",
+ AEligature="AE",aeligature="ae",AE="AE",ae="ae",
+ OEligature="OE",oeligature="oe",OE="OE",oe="oe",
+ IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij",
+ Lstroke="L",lstroke="l",Lslash="L",lslash="l",
+ Ostroke="O",ostroke="o",Oslash="O",oslash="o",
+ Ssharp="SS",ssharp="ss",
+ Aumlaut="A",aumlaut="a",
+ Eumlaut="E",eumlaut="e",
+ Iumlaut="I",iumlaut="i",
+ Oumlaut="O",oumlaut="o",
+ Uumlaut="U",uumlaut="u",
+ }
+}
end -- closure
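Editorial note on reading the helpdata above: each ligature entry maps a first glyph name to { next-glyph, ligature } pairs, which addligatures in the afm closure turns into per-character ligature tables; for instance:

    local lig = fonts.handlers.afm.helpdata.ligatures['f']
    -- lig[2] is { 'i', 'fi' }: an 'f' followed by an 'i' forms the 'fi' ligature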
@@ -5055,9 +6566,9 @@ local utfbyte=utf.byte
local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
local type,next,tonumber,tostring=type,next,tonumber,tostring
local abs=math.abs
-local getn=table.getn
+local insert=table.insert
local lpegmatch=lpeg.match
-local reversed,concat,remove=table.reversed,table.concat,table.remove
+local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
local ioflush=io.flush
local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
local formatters=string.formatters
@@ -5079,7 +6590,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.743
+otf.version=2.755
otf.cache=containers.define("fonts","otf",otf.version,true)
local fontdata=fonts.hashes.identifiers
local chardata=characters and characters.data
@@ -5099,17 +6610,47 @@ local packdata=true
local syncspace=true
local forcenotdef=false
local includesubfonts=false
+local overloadkerns=false
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
local wildcard="*"
local default="dflt"
local fontloaderfields=fontloader.fields
local mainfields=nil
local glyphfields=nil
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end)
+function otf.fileformat(filename)
+ local leader=lower(io.loadchunk(filename,4))
+ local suffix=lower(file.suffix(filename))
+ if leader=="otto" then
+ return formats.otf,suffix=="otf"
+ elseif leader=="ttcf" then
+ return formats.ttc,suffix=="ttc"
+ elseif suffix=="ttc" then
+ return formats.ttc,true
+ elseif suffix=="dfont" then
+ return formats.dfont,true
+ else
+ return formats.ttf,suffix=="ttf"
+ end
+end
+local function otf_format(filename)
+ local format,okay=otf.fileformat(filename)
+ if not okay then
+ report_otf("font %a is actually an %a file",filename,format)
+ end
+ return format
+end
local function load_featurefile(raw,featurefile)
if featurefile and featurefile~="" then
if trace_loading then
@@ -5201,6 +6742,7 @@ local valid_fields=table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
+ "validation_state",
"version",
"vert_base",
"weight",
@@ -5296,7 +6838,7 @@ end
function enhancers.register(what,action)
actions[what]=action
end
-function otf.load(filename,format,sub,featurefile)
+function otf.load(filename,sub,featurefile)
local base=file.basename(file.removesuffix(filename))
local name=file.removesuffix(base)
local attr=lfs.attributes(filename)
@@ -5394,7 +6936,7 @@ function otf.load(filename,format,sub,featurefile)
data={
size=size,
time=time,
- format=format,
+ format=otf_format(filename),
featuredata=featurefiles,
resources={
filename=resolvers.unresolve(filename),
@@ -5415,7 +6957,7 @@ function otf.load(filename,format,sub,featurefile)
},
descriptions={},
goodies={},
- helpers={
+ helpers={
tounicodelist=splitter,
tounicodetable=lpeg.Ct(splitter),
},
@@ -5460,6 +7002,9 @@ function otf.load(filename,format,sub,featurefile)
report_otf("loading from cache using hash %a",hash)
end
enhance("unpack",data,filename,nil,false)
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
enhance("add dimensions",data,filename,nil,false)
if trace_sequences then
showfeatureorder(data,filename)
@@ -5588,15 +7133,22 @@ actions["prepare glyphs"]=function(data,filename,raw)
local glyph=cidglyphs[index]
if glyph then
local unicode=glyph.unicode
+if unicode>=0x00E000 and unicode<=0x00F8FF then
+ unicode=-1
+elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
+ unicode=-1
+elseif unicode>=0x100000 and unicode<=0x10FFFD then
+ unicode=-1
+end
local name=glyph.name or cidnames[index]
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=cidunicodes[index]
end
if unicode and descriptions[unicode] then
report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
unicode=-1
end
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
if not name then
name=format("u%06X",private)
end
@@ -5642,7 +7194,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
if glyph then
local unicode=glyph.unicode
local name=glyph.name
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=private
unicodes[name]=private
if trace_private then
@@ -5664,7 +7216,6 @@ actions["prepare glyphs"]=function(data,filename,raw)
}
local altuni=glyph.altuni
if altuni then
- local d
for i=1,#altuni do
local a=altuni[i]
local u=a.unicode
@@ -5677,15 +7228,8 @@ actions["prepare glyphs"]=function(data,filename,raw)
vv={ [u]=unicode }
variants[v]=vv
end
- elseif d then
- d[#d+1]=u
- else
- d={ u }
end
end
- if d then
- duplicates[unicode]=d
- end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -5703,47 +7247,45 @@ actions["check encoding"]=function(data,filename,raw)
local duplicates=resources.duplicates
local mapdata=raw.map or {}
local unicodetoindex=mapdata and mapdata.map or {}
+ local indextounicode=mapdata and mapdata.backmap or {}
local encname=lower(data.enc_name or mapdata.enc_name or "")
- local criterium=0xFFFF
+ local criterium=0xFFFF
+ local privateoffset=constructors.privateoffset
if find(encname,"unicode") then
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- for unicode,index in next,unicodetoindex do
- if unicode<=criterium and not descriptions[unicode] then
- local parent=indices[index]
- if not parent then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ local reported={}
+ for maybeunicode,index in next,unicodetoindex do
+ if descriptions[maybeunicode] then
+ else
+ local unicode=indices[index]
+ if not unicode then
+ elseif maybeunicode==unicode then
+ elseif unicode>privateoffset then
else
- local parentdescription=descriptions[parent]
- if parentdescription then
- local altuni=parentdescription.altuni
- if not altuni then
- altuni={ { unicode=parent } }
- parentdescription.altuni=altuni
- duplicates[parent]={ unicode }
+ local d=descriptions[unicode]
+ if d then
+ local c=d.copies
+ if c then
+ c[maybeunicode]=true
else
- local done=false
- for i=1,#altuni do
- if altuni[i].unicode==parent then
- done=true
- break
- end
- end
- if not done then
- altuni[#altuni+1]={ unicode=parent }
- table.insert(duplicates[parent],unicode)
- end
- end
- if trace_loading then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ d.copies={ [maybeunicode]=true }
end
- else
- report_otf("weird, unicode %U points to %U with index %H",unicode,index)
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index]=true
end
end
end
end
+ for unicode,data in next,descriptions do
+ local d=data.copies
+ if d then
+ duplicates[unicode]=sortedkeys(d)
+ data.copies=nil
+ end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -5751,6 +7293,7 @@ actions["check encoding"]=function(data,filename,raw)
end
if mapdata then
mapdata.map={}
+ mapdata.backmap={}
end
end
actions["add duplicates"]=function(data,filename,raw)
@@ -5761,28 +7304,37 @@ actions["add duplicates"]=function(data,filename,raw)
local indices=resources.indices
local duplicates=resources.duplicates
for unicode,d in next,duplicates do
- for i=1,#d do
- local u=d[i]
- if not descriptions[u] then
- local description=descriptions[unicode]
- local duplicate=table.copy(description)
- duplicate.comment=format("copy of U+%05X",unicode)
- descriptions[u]=duplicate
- local n=0
- for _,description in next,descriptions do
- if kerns then
- local kerns=description.kerns
- for _,k in next,kerns do
- local ku=k[unicode]
- if ku then
- k[u]=ku
- n=n+1
+ local nofduplicates=#d
+ if nofduplicates>4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ for i=1,nofduplicates do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local n=0
+ for _,description in next,descriptions do
+ if kerns then
+ local kerns=description.kerns
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
end
end
end
- end
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ if u>0 then
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
end
end
end
@@ -5935,14 +7487,6 @@ local g_directions={
gsub_reversecontextchain=-1,
gpos_reversecontextchain=-1,
}
-local function supported(features)
- for i=1,#features do
- if features[i].ismac then
- return false
- end
- end
- return true
-end
actions["reorganize subtables"]=function(data,filename,raw)
local resources=data.resources
local sequences={}
@@ -5956,7 +7500,6 @@ actions["reorganize subtables"]=function(data,filename,raw)
for k=1,#dw do
local gk=dw[k]
local features=gk.features
- if not features or supported(features) then
local typ=gk.type
local chain=g_directions[typ] or 0
local subtables=gk.subtables
@@ -5986,10 +7529,16 @@ actions["reorganize subtables"]=function(data,filename,raw)
report_otf("skipping weird lookup number %s",k)
elseif features then
local f={}
+ local o={}
for i=1,#features do
local df=features[i]
local tag=strip(lower(df.tag))
- local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ o[#o+1]=tag
+ end
local dscripts=df.scripts
for i=1,#dscripts do
local d=dscripts[i]
@@ -6009,6 +7558,7 @@ actions["reorganize subtables"]=function(data,filename,raw)
subtables=subtables,
markclass=markclass,
features=f,
+ order=o,
}
else
lookups[name]={
@@ -6019,7 +7569,6 @@ actions["reorganize subtables"]=function(data,filename,raw)
markclass=markclass,
}
end
- end
end
end
end
@@ -6390,74 +7939,93 @@ actions["merge kern classes"]=function(data,filename,raw)
local resources=data.resources
local unicodes=resources.unicodes
local splitter=data.helpers.tounicodetable
+ local ignored=0
+ local blocked=0
for gp=1,#gposlist do
local gpos=gposlist[gp]
local subtables=gpos.subtables
if subtables then
+ local first_done={}
+ local split={}
for s=1,#subtables do
local subtable=subtables[s]
local kernclass=subtable.kernclass
+ local lookup=subtable.lookup or subtable.name
if kernclass then
- local split={}
- for k=1,#kernclass do
- local kcl=kernclass[k]
- local firsts=kcl.firsts
- local seconds=kcl.seconds
- local offsets=kcl.offsets
- local lookups=kcl.lookup
- if type(lookups)~="table" then
- lookups={ lookups }
- end
- for n,s in next,firsts do
- split[s]=split[s] or lpegmatch(splitter,s)
- end
- local maxseconds=0
- for n,s in next,seconds do
- if n>maxseconds then
- maxseconds=n
- end
- split[s]=split[s] or lpegmatch(splitter,s)
- end
- for l=1,#lookups do
- local lookup=lookups[l]
- for fk=1,#firsts do
- local fv=firsts[fk]
- local splt=split[fv]
- if splt then
- local extrakerns={}
- local baseoffset=(fk-1)*maxseconds
- for sk=2,maxseconds do
- local sv=seconds[sk]
- local splt=split[sv]
- if splt then
- local offset=offsets[baseoffset+sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]]=offset
- end
- end
+ if #kernclass>0 then
+ kernclass=kernclass[1]
+ lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts=kernclass.firsts
+ local seconds=kernclass.seconds
+ local offsets=kernclass.offsets
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
end
end
- for i=1,#splt do
- local first_unicode=splt[i]
- local description=descriptions[first_unicode]
- if description then
- local kerns=description.kerns
- if not kerns then
- kerns={}
- description.kerns=kerns
- end
- local lookupkerns=kerns[lookup]
- if not lookupkerns then
- lookupkerns={}
- kerns[lookup]=lookupkerns
- end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked=blocked+1
+ else
+ first_done[first_unicode]=true
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ if overloadkerns then
for second_unicode,kern in next,extrakerns do
lookupkerns[second_unicode]=kern
end
- elseif trace_loading then
- report_otf("no glyph data for %U",first_unicode)
+ else
+ for second_unicode,kern in next,extrakerns do
+ local k=lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode]=kern
+ elseif k~=kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored=ignored+1
+ end
+ end
end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
end
end
end
@@ -6468,6 +8036,12 @@ actions["merge kern classes"]=function(data,filename,raw)
end
end
end
+ if ignored>0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked>0 then
+ report_otf("%s succesive kerns blocked",blocked)
+ end
end
end
actions["check glyphs"]=function(data,filename,raw)
@@ -6491,6 +8065,11 @@ actions["check metadata"]=function(data,filename,raw)
ttftables[i].data="deleted"
end
end
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ local name=file.nameonly(filename)
+ metadata.fontname="bad-fontname-"..name
+ metadata.fullname="bad-fullname-"..name
+ end
end
actions["cleanup tables"]=function(data,filename,raw)
data.resources.indices=nil
@@ -6681,10 +8260,19 @@ local function copytotfm(data,cache_id)
end
end
end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ report_otf("changing %a units to %a",0,units)
+ end
local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
local charwidth=pfminfo.avgwidth
- local italicangle=metadata.italicangle
local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ local italicangle=metadata.italicangle
properties.monospaced=monospaced
parameters.italicangle=italicangle
parameters.charwidth=charwidth
@@ -6713,14 +8301,6 @@ local function copytotfm(data,cache_id)
end
end
spaceunits=tonumber(spaceunits) or 500
- local filename=constructors.checkedfilename(resources)
- local fontname=metadata.fontname
- local fullname=metadata.fullname or fontname
- local units=metadata.units_per_em or 1000
- if units==0 then
- units=1000
- metadata.units_per_em=1000
- end
parameters.slant=0
parameters.space=spaceunits
parameters.space_stretch=units/2
@@ -6729,10 +8309,10 @@ local function copytotfm(data,cache_id)
parameters.quad=units
if spaceunits<2*units/5 then
end
- if italicangle then
+ if italicangle and italicangle~=0 then
parameters.italicangle=italicangle
parameters.italicfactor=math.cos(math.rad(90+italicangle))
- parameters.slant=- math.round(math.tan(italicangle*math.pi/180))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
end
if monospaced then
parameters.space_stretch=0
@@ -6759,7 +8339,7 @@ local function copytotfm(data,cache_id)
parameters.units=units
properties.space=spacer
properties.encodingbytes=2
- properties.format=data.format or fonts.formats[filename] or "opentype"
+ properties.format=data.format or otf_format(filename) or formats.otf
properties.noglyphnames=true
properties.filename=filename
properties.fontname=fontname
@@ -6784,10 +8364,27 @@ local function otftotfm(specification)
local name=specification.name
local sub=specification.sub
local filename=specification.filename
- local format=specification.format
local features=specification.features.normal
- local rawdata=otf.load(filename,format,sub,features and features.featurefile)
+ local rawdata=otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
+ local descriptions=rawdata.descriptions
+ local duplicates=rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates,nofduplicated=0,0
+ for parent,list in next,duplicates do
+ for i=1,#list do
+ local unicode=list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ end
+ nofduplicates=nofduplicates+#list
+ end
+ if trace_otf and nofduplicated~=nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
rawdata.lookuphash={}
tfmdata=copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
@@ -6868,41 +8465,33 @@ function otf.collectlookups(rawdata,kind,script,language)
end
return nil,nil
end
-local function check_otf(forced,specification,suffix,what)
+local function check_otf(forced,specification,suffix)
local name=specification.name
if forced then
- name=file.addsuffix(name,suffix,true)
+ name=specification.forcedname
end
local fullname=findbinfile(name,suffix) or ""
if fullname=="" then
fullname=fonts.names.getfilename(name,suffix) or ""
end
- if fullname~="" then
+ if fullname~="" and not fonts.names.ignoredfile(fullname) then
specification.filename=fullname
- specification.format=what
return read_from_otf(specification)
end
end
-local function opentypereader(specification,suffix,what)
+local function opentypereader(specification,suffix)
local forced=specification.forced or ""
- if forced=="otf" then
- return check_otf(true,specification,forced,"opentype")
- elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then
- return check_otf(true,specification,forced,"truetype")
+ if formats[forced] then
+ return check_otf(true,specification,forced)
else
- return check_otf(false,specification,suffix,what)
+ return check_otf(false,specification,suffix)
end
end
-readers.opentype=opentypereader
-local formats=fonts.formats
-formats.otf="opentype"
-formats.ttf="truetype"
-formats.ttc="truetype"
-formats.dfont="truetype"
-function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
-function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
-function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
-function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
+readers.opentype=opentypereader
+function readers.otf (specification) return opentypereader(specification,"otf") end
+function readers.ttf (specification) return opentypereader(specification,"ttf") end
+function readers.ttc (specification) return opentypereader(specification,"ttf") end
+function readers.dfont(specification) return opentypereader(specification,"ttf") end
function otf.scriptandlanguage(tfmdata,attr)
local properties=tfmdata.properties
return properties.script or "dflt",properties.language or "dflt"
@@ -7415,8 +9004,9 @@ basemethods.shared={
basemethod="independent"
local function featuresinitializer(tfmdata,value)
if true then
- local t=trace_preparing and os.clock()
+ local starttime=trace_preparing and os.clock()
local features=tfmdata.shared.features
+ local fullname=trace_preparing and tfmdata.properties.fullname
if features then
applybasemethod("initializehashes",tfmdata)
local collectlookups=otf.collectlookups
@@ -7426,26 +9016,34 @@ local function featuresinitializer(tfmdata,value)
local language=properties.language
local basesubstitutions=rawdata.resources.features.gsub
local basepositionings=rawdata.resources.features.gpos
- if basesubstitutions then
- for feature,data in next,basesubstitutions do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositionings then
- for feature,data in next,basepositionings do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
+ if basesubstitutions or basepositionings then
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local sfeatures=sequence.features
+ if sfeatures then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local feature=order[i]
+ if features[feature] then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base feature %a for %a",feature,fullname)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base feature %a for %a",feature,fullname)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
end
end
end
@@ -7453,7 +9051,7 @@ local function featuresinitializer(tfmdata,value)
registerbasehash(tfmdata)
end
if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
end
end
end
@@ -7549,9 +9147,9 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
return 0,0
end
end
-function injections.setmark(start,base,factor,rlmode,ba,ma,index)
- local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
- local bound=base[a_markbase]
+function injections.setmark(start,base,factor,rlmode,ba,ma)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ local bound=base[a_markbase]
local index=1
if bound then
local mb=marks[bound]
@@ -7803,6 +9401,11 @@ function injections.handler(head,where,keep)
else
n.xoffset=p.xoffset-d[1]
end
+ local w=n.width
+ if w~=0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
+ end
end
if mk[p] then
n.yoffset=p.yoffset+d[2]
@@ -7944,6 +9547,7 @@ analyzers.useunicodemarks=false
local a_state=attributes.private('state')
local nodecodes=nodes.nodecodes
local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
local math_code=nodecodes.math
local traverse_id=node.traverse_id
local traverse_node_list=node.traverse
@@ -7976,6 +9580,11 @@ local features={
medi=s_medi,
fina=s_fina,
isol=s_isol,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
}
analyzers.states=states
analyzers.features=features
@@ -8010,7 +9619,7 @@ function analyzers.setstate(head,font)
first,last,n=nil,nil,0
end
elseif id==disc_code then
- current[a_state]=s_midi
+ current[a_state]=s_medi
last=current
else
if first and first==last then
@@ -8062,7 +9671,7 @@ local function analyzeprocessor(head,font,attr)
end
registerotffeature {
name="analyze",
- description="analysis of (for instance) character classes",
+ description="analysis of character classes",
default=true,
initializers={
node=analyzeinitializer,
@@ -8339,6 +9948,7 @@ local default="dflt"
local nodecodes=nodes.nodecodes
local whatcodes=nodes.whatcodes
local glyphcodes=nodes.glyphcodes
+local disccodes=nodes.disccodes
local glyph_code=nodecodes.glyph
local glue_code=nodecodes.glue
local disc_code=nodecodes.disc
@@ -8346,6 +9956,7 @@ local whatsit_code=nodecodes.whatsit
local math_code=nodecodes.math
local dir_code=whatcodes.dir
local localpar_code=whatcodes.localpar
+local discretionary_code=disccodes.discretionary
local ligature_code=glyphcodes.ligature
local privateattribute=attributes.private
local a_state=privateattribute('state')
@@ -8593,13 +10204,13 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
end
end
end
-local function multiple_glyphs(head,start,multiple)
+local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples=#multiple
if nofmultiples>0 then
start.char=multiple[1]
if nofmultiples>1 then
local sn=start.next
- for k=2,nofmultiples do
+ for k=2,nofmultiples do
local n=copy_node(start)
n.char=multiple[k]
n.next=sn
@@ -8634,11 +10245,11 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
end
return head,start,true
end
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
end
- return multiple_glyphs(head,start,multiple)
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local s,stop,discfound=start.next,nil,false
@@ -8702,9 +10313,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
break
end
end
- if stop then
- local lig=ligature.ligature
- if lig then
+ local lig=ligature.ligature
+ if lig then
+ if stop then
if trace_ligatures then
local stopchar=stop.char
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
@@ -8714,7 +10325,13 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
end
return head,start,true
else
+ start.char=lig
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ return head,start,true
end
+ else
end
end
return head,start,false
@@ -8871,7 +10488,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -8972,7 +10589,6 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
prev=snext
snext=snext.next
else
- local krn=kerns[nextchar]
if not krn then
elseif type(krn)=="table" then
if lookuptype=="pair" then
@@ -9045,34 +10661,6 @@ function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,loo
return head,start,false
end
end
-local function delete_till_stop(start,stop,ignoremarks)
- local n=1
- if start==stop then
- elseif ignoremarks then
- repeat
- local next=start.next
- if not marks[next.char] then
- local components=next.components
- if components then
- flush_node_list(components)
- end
- delete_node(start,next)
- end
- n=n+1
- until next==stop
- else
- repeat
- local next=start.next
- local components=next.components
- if components then
- flush_node_list(components)
- end
- delete_node(start,next)
- n=n+1
- until next==stop
- end
- return n
-end
function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
local current=start
local subtables=currentlookup.subtables
@@ -9112,7 +10700,6 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
end
chainmores.gsub_single=chainprocs.gsub_single
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- delete_till_stop(start,stop)
local startchar=start.char
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -9131,7 +10718,7 @@ function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,
if trace_multiples then
logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
end
- return multiple_glyphs(head,start,replacements)
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
end
end
return head,start,false
@@ -9412,7 +10999,7 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -9515,6 +11102,7 @@ function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lo
end
return head,start,false
end
+chainmores.gpos_single=chainprocs.gpos_single
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
local snext=start.next
if snext then
@@ -9583,6 +11171,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
end
return head,start,false
end
+chainmores.gpos_pair=chainprocs.gpos_pair
local function show_skip(kind,chainname,char,ck,class)
if ck[9] then
logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
@@ -9799,7 +11388,11 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if chainlookup then
local cp=chainprocs[chainlookup.type]
if cp then
- head,start,done=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ local ok
+ head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done=true
+ end
else
logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
end
@@ -9826,19 +11419,24 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
local chainlookupname=chainlookups[i]
- local chainlookup=lookuptable[chainlookupname]
- local cp=chainlookup and chainmores[chainlookup.type]
- if cp then
- local ok,n
- head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- if ok then
- done=true
- i=i+(n or 1)
- else
+ local chainlookup=lookuptable[chainlookupname]
+ if not chainlookup then
+ i=i+1
+ else
+ local cp=chainmores[chainlookup.type]
+ if not cp then
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
i=i+1
+ else
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
end
- else
- i=i+1
end
if start then
start=start.next
@@ -9920,14 +11518,20 @@ local autofeatures=fonts.analyzers.features
local function initialize(sequence,script,language,enabled)
local features=sequence.features
if features then
- for kind,scripts in next,features do
- local valid=enabled[kind]
- if valid then
- local languages=scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local kind=order[i]
+ local valid=enabled[kind]
+ if valid then
+ local scripts=features[kind]
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
end
end
+ else
end
end
return false
@@ -9954,12 +11558,12 @@ function otf.dataset(tfmdata,font)
}
rs[language]=rl
local sequences=tfmdata.resources.sequences
-for s=1,#sequences do
- local v=enabled and initialize(sequences[s],script,language,enabled)
- if v then
- rl[#rl+1]=v
- end
-end
+ for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+ end
end
return rl
end
@@ -9985,227 +11589,404 @@ local function featuresprocessor(head,font,attr)
local done=false
local datasets=otf.dataset(tfmdata,font,attr)
local dirstack={}
-for s=1,#datasets do
- local dataset=datasets[s]
- featurevalue=dataset[1]
- local sequence=dataset[5]
- local rlparmode=0
- local topstack=0
- local success=false
- local attribute=dataset[2]
- local chain=dataset[3]
- local typ=sequence.type
- local subtables=sequence.subtables
- if chain<0 then
- local handler=handlers[typ]
- local start=find_node_tail(head)
+ for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local sequence=dataset[5]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=a==attr
+ else
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.prev end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ end
+ else
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=start.id
+ if id==glyph_code and start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=disc.prev
+ local next=disc.next
+ if prev and next then
+ prev.next=next
+ local a=prev[0]
+ if a then
+ a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ else
+ a=not attribute or prev[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[prev.char]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ success=true
+ end
+ end
+ end
+ prev.next=disc
+ end
+ return next
+ end
while start do
local id=start.id
if id==glyph_code then
if start.font==font and start.subtype<256 then
local a=start[0]
if a then
- a=a==attr
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
else
- a=true
+ a=not attribute or start[a_state]==attribute
end
if a then
- for i=1,#subtables do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[start.char]
- if lookupmatch then
- head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
end
end
- if start then start=start.prev end
+ if start then start=start.next end
else
- start=start.prev
+ start=start.next
end
else
- start=start.prev
+ start=start.next
end
+ elseif id==disc_code then
+ if start.subtype==discretionary_code then
+ local pre=start.pre
+ if pre then
+ local new=subrun(pre)
+ if new then start.pre=new end
+ end
+ local post=start.post
+ if post then
+ local new=subrun(post)
+ if new then start.post=new end
+ end
+ local replace=start.replace
+ if replace then
+ local new=subrun(replace)
+ if new then start.replace=new end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
+ end
+ start=start.next
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
else
- start=start.prev
+ start=start.next
end
end
- else
- local handler=handlers[typ]
- local ns=#subtables
- local start=head
- rlmode=0
- if ns==1 then
- local lookupname=subtables[1]
- local lookupcache=lookuphash[lookupname]
- if not lookupcache then
- report_missing_cache(typ,lookupname)
- else
- while start do
- local id=start.id
- if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
- if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
- else
- a=not attribute or start[a_state]==attribute
- end
- if a then
- local lookupmatch=lookupcache[start.char]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success=true
- end
+ end
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=start.id
+ if id==glyph_code and start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ elseif not start then
+ break
end
- if start then start=start.next end
- else
- start=start.next
end
- elseif id==math_code then
- start=end_of_math(start).next
else
- start=start.next
+ report_missing_cache(typ,lookupname)
end
- elseif id==whatsit_code then
- local subtype=start.subtype
- if subtype==dir_code then
- local dir=start.dir
- if dir=="+TRT" or dir=="+TLT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- elseif dir=="-TRT" or dir=="-TLT" then
- topstack=topstack-1
- end
- local newdir=dirstack[topstack]
- if newdir=="+TRT" then
- rlmode=-1
- elseif newdir=="+TLT" then
- rlmode=1
- else
- rlmode=rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype==localpar_code then
- local dir=start.dir
- if dir=="TRT" then
- rlparmode=-1
- elseif dir=="TLT" then
- rlparmode=1
- else
- rlparmode=0
- end
- rlmode=rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=disc.prev
+ local next=disc.next
+ if prev and next then
+ prev.next=next
+ local a=prev[0]
+ if a then
+ a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ else
+ a=not attribute or prev[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[prev.char]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
end
end
- start=start.next
- elseif id==math_code then
- start=end_of_math(start).next
else
- start=start.next
+ report_missing_cache(typ,lookupname)
end
end
end
- else
- while start do
- local id=start.id
- if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
- if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
- else
- a=not attribute or start[a_state]==attribute
- end
- if a then
- for i=1,ns do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[start.char]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success=true
- break
- elseif not start then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
+ prev.next=disc
+ end
+ return next
+ end
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ elseif not start then
+ break
end
end
- if start then start=start.next end
else
- start=start.next
+ report_missing_cache(typ,lookupname)
end
- else
- start=start.next
end
- elseif id==whatsit_code then
- local subtype=start.subtype
- if subtype==dir_code then
- local dir=start.dir
- if dir=="+TRT" or dir=="+TLT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- elseif dir=="-TRT" or dir=="-TLT" then
- topstack=topstack-1
- end
- local newdir=dirstack[topstack]
- if newdir=="+TRT" then
- rlmode=-1
- elseif newdir=="+TLT" then
- rlmode=1
- else
- rlmode=rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype==localpar_code then
- local dir=start.dir
- if dir=="TRT" then
- rlparmode=-1
- elseif dir=="TLT" then
- rlparmode=1
- else
- rlparmode=0
- end
- rlmode=rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- end
- start=start.next
- elseif id==math_code then
- start=end_of_math(start).next
+ if start then start=start.next end
else
start=start.next
end
+ else
+ start=start.next
+ end
+ elseif id==disc_code then
+ if start.subtype==discretionary_code then
+ local pre=start.pre
+ if pre then
+ local new=subrun(pre)
+ if new then start.pre=new end
+ end
+ local post=start.post
+ if post then
+ local new=subrun(post)
+ if new then start.post=new end
+ end
+ local replace=start.replace
+ if replace then
+ local new=subrun(replace)
+ if new then start.replace=new end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
end
+ start=start.next
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
end
end
- if success then
- done=true
- end
- if trace_steps then
- registerstep(head)
- end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
end
return head,done
end
@@ -10809,6 +12590,14 @@ local function packdata(data)
features[script]=pack_normal(feature)
end
end
+ local order=sequence.order
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ local markclass=sequence.markclass
+ if markclass then
+ sequence.markclass=pack_boolean(markclass)
+ end
end
end
local lookups=resources.lookups
@@ -11221,6 +13010,20 @@ local function unpackdata(data)
end
end
end
+ local order=feature.order
+ if order then
+ local tv=tables[order]
+ if tv then
+ feature.order=tv
+ end
+ end
+ local markclass=feature.markclass
+ if markclass then
+ local tv=tables[markclass]
+ if tv then
+ feature.markclass=tv
+ end
+ end
end
end
local lookups=resources.lookups
@@ -11315,6 +13118,7 @@ if not modules then modules={} end modules ['font-def']={
local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
local tostring,next=tostring,next
local lpegmatch=lpeg.match
+local suffixonly,removesuffix=file.suffix,file.removesuffix
local allocate=utilities.storage.allocate
local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end)
local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
@@ -11362,7 +13166,7 @@ addlookup("file")
addlookup("name")
addlookup("spec")
local function getspecification(str)
- return lpegmatch(splitter,str)
+ return lpegmatch(splitter,str or "")
end
definers.getspecification=getspecification
function definers.registersplit(symbol,action,verbosename)
@@ -11404,10 +13208,11 @@ definers.resolvers=definers.resolvers or {}
local resolvers=definers.resolvers
function resolvers.file(specification)
local name=resolvefile(specification.name)
- local suffix=file.suffix(name)
+ local suffix=lower(suffixonly(name))
if fonts.formats[suffix] then
specification.forced=suffix
- specification.name=file.removesuffix(name)
+ specification.forcedname=name
+ specification.name=removesuffix(name)
else
specification.name=name
end
@@ -11419,10 +13224,11 @@ function resolvers.name(specification)
if resolved then
specification.resolved=resolved
specification.sub=sub
- local suffix=file.suffix(resolved)
+ local suffix=lower(suffixonly(resolved))
if fonts.formats[suffix] then
specification.forced=suffix
- specification.name=file.removesuffix(resolved)
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
else
specification.name=resolved
end
@@ -11438,8 +13244,9 @@ function resolvers.spec(specification)
if resolved then
specification.resolved=resolved
specification.sub=sub
- specification.forced=file.suffix(resolved)
- specification.name=file.removesuffix(resolved)
+ specification.forced=lower(suffixonly(resolved))
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
end
else
resolvers.name(specification)
@@ -11454,8 +13261,7 @@ function definers.resolve(specification)
end
if specification.forced=="" then
specification.forced=nil
- else
- specification.forced=specification.forced
+ specification.forcedname=nil
end
specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
if specification.sub and specification.sub~="" then
@@ -11500,7 +13306,7 @@ function definers.loadfont(specification)
if not tfmdata then
local forced=specification.forced or ""
if forced~="" then
- local reader=readers[lower(forced)]
+ local reader=readers[lower(forced)]
tfmdata=reader and reader(specification)
if not tfmdata then
report_defining("forced type %a of %a not found",forced,specification.name)
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-otn.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-otn.lua
new file mode 100644
index 00000000000..068f0a9b926
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-otn.lua
@@ -0,0 +1,2848 @@
+if not modules then modules = { } end modules ['font-otn'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- preprocessors = { "nodes" }
+
+-- this is still somewhat preliminary and it will get better in due time;
+-- much functionality could only be implemented thanks to the husayni font
+-- of Idris Samawi Hamid, to whom we dedicate this module.
+
+-- in retrospect it always looks easy but believe it or not, it took a lot
+-- of work to get proper open type support done: buggy fonts, fuzzy specs,
+-- specially made test fonts, many skype sessions between taco, idris and me,
+-- torture tests etc etc ... unfortunately the code does not show how much
+-- time it took ...
+
+-- todo:
+--
+-- kerning is probably not yet ok for latin around disc nodes (interesting challenge)
+-- extension infrastructure (for usage out of context)
+-- sorting features according to vendors/renderers
+-- alternative loop quitters
+-- check cursive and r2l
+-- find out where ignore-mark-classes went
+-- default features (per language, script)
+-- handle positions (we need example fonts)
+-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
+-- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
+-- remove some optimizations (when I have a faster machine)
+--
+-- maybe redo the lot some way (more context specific)
+
+--[[ldx--
+<p>This module is a bit more split up than I'd like but since we also want to test
+with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
+and discussion about improvements and functionality mostly happens on the
+<l n='context'/> mailing list.</p>
+
+<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
+free specification there's also the problem that Microsoft and Adobe
+may have their own interpretation of how and in what order to apply features.
+In general the Microsoft website has more detailed specifications and is a
+better reference. There is also some information in the FontForge help files.</p>
+
+<p>Because there is so much possible, fonts might contain bugs and/or be made to
+work with certain renderers. These may evolve over time, which may have the side
+effect that suddenly fonts behave differently.</p>
+
+<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
+implementation. Of course all errors are mine and of course the code can be
+improved. There are quite some optimizations going on here and processing speed
+is currently acceptable. Not all functions are implemented yet, often because I
+lack the fonts for testing. Many scripts are not yet supported either, but I will
+look into them as soon as <l n='context'/> users ask for it.</p>
+
+<p>Because there are different interpretations possible, I will extend the code
+with more (configurable) variants. I can also add hooks for users so that they can
+write their own extensions.</p>
+
+<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
+relationship with unicode at all, apart from the fact that a font might cover certain
+ranges of characters. One character can have multiple shapes. However, at the
+<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
+space. This is needed because we need to access them and <l n='tex'/> has to include
+them in the output eventually.</p>
+
+<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
+In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
+so that successive runs can use the optimized table (after loading the table is
+unpacked). The flattening code used later is a prelude to an even more compact table
+format (and as such it keeps evolving).</p>
+
+<p>This module is sparsely documented because it is a moving target. The table format
+of the reader changes and we experiment a lot with different methods for supporting
+features.</p>
+
+<p>As with the <l n='afm'/> code, we may decide to store more information in the
+<l n='otf'/> table.</p>
+
+<p>Incrementing the version number will force a re-cache. We jump the number by one
+when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
+results in different tables.</p>
+--ldx]]--
+
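+-- A quick, self-contained sketch of the private space idea mentioned above; the
+-- names and the 0xF0000 offset are illustrative only and not part of this module:
+-- glyphs without a unicode of their own get a slot in a private area so that tex
+-- can still address them.
+
+local sketch_private = 0xF0000
+
+local function sketch_private_unicode(glyphindex,tounicode)
+    local u = tounicode[glyphindex]
+    if not u then
+        u = sketch_private            -- hand out the next free private slot
+        sketch_private = sketch_private + 1
+        tounicode[glyphindex] = u
+    end
+    return u
+end
+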
+-- action handler chainproc chainmore comment
+--
+-- gsub_single ok ok ok
+-- gsub_multiple ok ok not implemented yet
+-- gsub_alternate ok ok not implemented yet
+-- gsub_ligature ok ok ok
+-- gsub_context ok --
+-- gsub_contextchain ok --
+-- gsub_reversecontextchain ok --
+-- chainsub -- ok
+-- reversesub -- ok
+-- gpos_mark2base ok ok
+-- gpos_mark2ligature ok ok
+-- gpos_mark2mark ok ok
+-- gpos_cursive ok untested
+-- gpos_single ok ok
+-- gpos_pair ok ok
+-- gpos_context ok --
+-- gpos_contextchain ok --
+--
+-- todo: contextpos and contextsub and class stuff
+--
+-- actions:
+--
+-- handler : actions triggered by lookup
+-- chainproc : actions triggered by contextual lookup
+-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
+--
+-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
+-- remark: we need to check what to do with discretionaries
+
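+-- A small, self-contained sketch of the dispatch idea behind the table above (all
+-- names here are illustrative, not the real tables): every lookup type maps to a
+-- handler that takes and returns head/start plus a success flag, and contextual
+-- matches route through chainprocs/chainmores using the same protocol.
+
+local sketch_handlers = {
+    gsub_single = function(head,start,kind,lookupname,replacement)
+        start.char = replacement      -- start stands in for a glyph node
+        return head, start, true
+    end,
+}
+
+local function sketch_apply(head,start,typ,kind,lookupname,lookupmatch)
+    local handler = sketch_handlers[typ]
+    if handler then
+        return handler(head,start,kind,lookupname,lookupmatch)
+    else
+        return head, start, false     -- unsupported type: leave the node list alone
+    end
+end
+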
+-- We used to have independent hashes for lookups but as the tags are unique
+-- we now use only one hash. If needed we can have multiple again but in that
+-- case I will probably prefix (i.e. rename) the lookups in the cached font file.
+
+-- Todo: make plugin feature that operates on char/glyphnode arrays
+
+local concat, insert, remove = table.concat, table.insert, table.remove
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
+local random = math.random
+local formatters = string.formatters
+
+local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
+
+local registertracker = trackers.register
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
+local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
+local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
+local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
+local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
+local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
+local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
+local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
+local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
+local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
+local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+
+local report_direct = logs.reporter("fonts","otf direct")
+local report_subchain = logs.reporter("fonts","otf subchain")
+local report_chain = logs.reporter("fonts","otf chain")
+local report_process = logs.reporter("fonts","otf process")
+local report_prepare = logs.reporter("fonts","otf prepare")
+local report_warning = logs.reporter("fonts","otf warning")
+
+registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
+
+registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+
+local insert_node_after = node.insert_after
+local delete_node = nodes.delete
+local copy_node = node.copy
+local find_node_tail = node.tail or node.slide
+local flush_node_list = node.flush_list
+local end_of_math = node.end_of_math
+
+local setmetatableindex = table.setmetatableindex
+
+local zwnj = 0x200C
+local zwj = 0x200D
+local wildcard = "*"
+local default = "dflt"
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local glyphcodes = nodes.glyphcodes
+local disccodes = nodes.disccodes
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
+
+local dir_code = whatcodes.dir
+local localpar_code = whatcodes.localpar
+
+local discretionary_code = disccodes.discretionary
+
+local ligature_code = glyphcodes.ligature
+
+local privateattribute = attributes.private
+
+-- Something is messed up: we have two mark / ligature indices, one at the injection
+-- end and one here ... this is based on KE's patches but there is something fishy
+-- there as I'm pretty sure that for husayni we need some connection (as it's much
+-- more complex than an average font) but I need proper examples of all cases, not
+-- of only some.
+
+local a_state = privateattribute('state')
+local a_markbase = privateattribute('markbase')
+local a_markmark = privateattribute('markmark')
+local a_markdone = privateattribute('markdone') -- assigned at the injection end
+local a_cursbase = privateattribute('cursbase')
+local a_curscurs = privateattribute('curscurs')
+local a_cursdone = privateattribute('cursdone')
+local a_kernpair = privateattribute('kernpair')
+local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
+
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
+
+local markonce = true
+local cursonce = true
+local kernonce = true
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local onetimemessage = fonts.loggers.onetimemessage or function() end
+
+otf.defaultnodealternate = "none" -- first last
+
+-- we share some vars here, after all, we have no nested lookups and less code
+
+local tfmdata = false
+local characters = false
+local descriptions = false
+local resources = false
+local marks = false
+local currentfont = false
+local lookuptable = false
+local anchorlookups = false
+local lookuptypes = false
+local handlers = { }
+local rlmode = 0
+local featurevalue = false
+
+-- head is always a whatsit so we can safely assume that head is not changed
+
+-- we use this for special testing and documentation
+
+local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+
+local function logwarning(...)
+ report_direct(...)
+end
+
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(n) -- currently the same as in font-otb
+ if type(n) == "number" then
+ local description = descriptions[n]
+ local name = description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam = { }, { }
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- later we will start at 2
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+
+local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,chainname)
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookupname)
+end
+
+-- We can assume that languages that use marks are not hyphenated. We can also assume
+-- that at most one discretionary is present.
+
+-- We do need components in funny kerning mode but maybe I can better reconstruct them
+-- as we do have the font components info available; removing components makes the
+-- previous code much simpler. Also, later on copying and freeing becomes easier.
+-- However, for arabic we need to keep them around for the sake of mark placement
+-- and indices.
+
+local function copy_glyph(g) -- next and prev are untouched !
+ local components = g.components
+ if components then
+ g.components = nil
+ local n = copy_node(g)
+ g.components = components
+ return n
+ else
+ return copy_node(g)
+ end
+end
+
+-- start is a mark and we need to keep that one
+
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start == stop and start.char == char then
+ return head, start
+ else
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
+ local base = copy_glyph(start)
+ if head == start then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start
+ if prev then
+ prev.next = base
+ end
+ if next then
+ next.prev = base
+ end
+ base.next = next
+ base.prev = prev
+ return head, base
+ end
+end
+
+-- The next code is somewhat complicated by the fact that some fonts can have ligatures made
+-- from ligatures that themselves have marks. This was identified by Kai in for instance
+-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
+-- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In the next
+-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
+-- third component.
+
+local function getcomponentindex(start)
+ if start.id ~= glyph_code then
+ return 0
+ elseif start.subtype == ligature_code then
+ local i = 0
+ local components = start.components
+ while components do
+ i = i + getcomponentindex(components)
+ components = components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
+
+-- eventually we will do positioning in another way (needs additional w/h/d fields)
+
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
+ if start == stop and start.char == char then
+ start.char = char
+ return head, start
+ end
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
+ local base = copy_glyph(start)
+ if start == head then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start -- start can have components
+ if prev then
+ prev.next = base
+ end
+ if next then
+ next.prev = base
+ end
+ base.next = next
+ base.prev = prev
+ if not discfound then
+ local deletemarks = markflag ~= "mark"
+ local components = start
+ local baseindex = 0
+ local componentindex = 0
+ local head = base
+ local current = base
+ -- first we loop over the glyphs in start .. stop
+ while start do
+ local char = start.char
+ if not marks[char] then
+ baseindex = baseindex + componentindex
+ componentindex = getcomponentindex(start)
+ elseif not deletemarks then -- quite fishy
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start = start.next
+ end
+ -- we can have one accent as part of a lookup and another following
+ -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari were added)
+ local start = current.next
+ while start and start.id == glyph_code do
+ local char = start.char
+ if marks[char] then
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start = start.next
+ end
+ end
+ return head, base
+end
+
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char = replacement
+ return head, start, true
+end
+
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n = #alternatives
+ if value == "random" then
+ local r = random(1,n)
+ return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value == "first" then
+ return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value == "last" then
+ return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value = tonumber(value)
+ if type(value) ~= "number" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value > n then
+ local defaultalt = otf.defaultnodealternate
+            if defaultalt == "first" then
+                return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+            elseif defaultalt == "last" then
+                return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value == 0 then
+ return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value < 1 then
+ return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
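+
+-- A usage sketch with made-up values: a user feature like salt=2 reaches this helper
+-- as the string "2" or the number 2, so
+--
+-- local choice = get_alternative_glyph(start,alternatives,2,false)
+--
+-- picks alternatives[2]; "random", "first" and "last" are handled explicitly and
+-- out-of-range values fall back according to otf.defaultnodealternate.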
+
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples = #multiple
+ if nofmultiples > 0 then
+ start.char = multiple[1]
+ if nofmultiples > 1 then
+ local sn = start.next
+ for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and marks[sn.char] do
+--     sn = sn.next
+-- end
+ local n = copy_node(start) -- ignore components
+ n.char = multiple[k]
+ n.next = sn
+ n.prev = start
+ if sn then
+ sn.prev = n
+ end
+ start.next = n
+ start = n
+ end
+ end
+ return head, start, true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
+ return head, start, false
+ end
+end
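+
+-- Illustration with made-up slots: a one-to-many substitution arrives here as an array
+-- of replacement characters; the first one reuses 'start' and the rest are inserted as
+-- copies after it, so
+--
+-- head, start, done = multiple_glyphs(head,start,{ 0x0644, 0x0627 },false)
+--
+-- would leave two glyph nodes in place of the original one.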
+
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ end
+ start.char = choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ end
+ end
+ return head, start, true
+end
+
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s, stop, discfound = start.next, nil, false
+ local startchar = start.char
+ if marks[startchar] then
+ while s do
+ local id = s.id
+ if id == glyph_code and s.font == currentfont and s.subtype<256 then
+ local lg = ligature[s.char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig = ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar = stop.char
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head, start, true
+ else
+ -- ok, goto next lookup
+ end
+ end
+ else
+ local skipmark = sequence.flags[1]
+ while s do
+ local id = s.id
+ if id == glyph_code and s.subtype<256 then
+ if s.font == currentfont then
+ local char = s.char
+ if skipmark and marks[char] then
+ s = s.next
+ else
+ local lg = ligature[char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = s.next
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id == disc_code then
+ discfound = true
+ s = s.next
+ else
+ break
+ end
+ end
+ local lig = ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar = stop.char
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ return head, start, true
+ else
+ -- weird but happens (in some arabic font)
+ start.char = lig
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ return head, start, true
+ end
+ else
+ -- weird but happens
+ end
+ end
+ return head, start, false
+end
+
+--[[ldx--
+<p>We get hits on a mark, but we're not sure if it has to be applied, so
+we need to explicitly test for basechar, baselig and basemark entries.</p>
+--ldx]]--
+
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ -- check chainpos variant
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local index = start[a_ligacomp]
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = base[a_ligacomp]
+ if blc and blc ~= slc then
+ base = base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
+ local alreadydone = cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done = false
+ local startchar = start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = nxt.next
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head, start, false
+ end
+end
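+
+-- Roughly: cursive attachment pairs the 'centry' anchor of the next glyph with the exit
+-- anchor of the current one and lets setcursive turn the anchor difference into an
+-- offset, so for two hypothetical joining glyphs one ends up with something like
+--
+-- local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,
+--     characters[startchar],characters[nextchar])
+--
+-- where dx and dy only serve the tracing; the real work happens inside setcursive.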
+
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar = start.char
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head, start, false
+end
+
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
+ -- todo: kerns in components of ligatures
+ local snext = start.next
+ if not snext then
+ return head, start, false
+ else
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ local lookuptype = lookuptypes[lookupname]
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = snext.next
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then -- probably not needed
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else -- wrong ... position has different entries
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ -- local a, b = krn[2], krn[6]
+ -- if a and a ~= 0 then
+ -- local k = setkern(snext,factor,rlmode,a)
+ -- if trace_kerns then
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ -- end
+ -- end
+ -- if b and b ~= 0 then
+ -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
+ -- end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+end
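+
+-- A sketch of the two shapes the kerning data can take (the exact layout of the value
+-- records is whatever setpair expects, so treat this as an illustration only):
+--
+-- kerns[nextchar] = -30        -- a plain kern, handled via setkern
+-- kerns[nextchar] = { ... }    -- pair positioning: slots 2 and 3 hold the value
+--                              -- records for the first and second glyph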
+
+--[[ldx--
+<p>I will implement multiple chain replacements once I run into a font that uses
+them. It's not that complex to handle.</p>
+--ldx]]--
+
+local chainmores = { }
+local chainprocs = { }
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+
+local logwarning = report_subchain
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+
+local logwarning = report_chain
+
+-- We could share functions but that would lead to extra function calls with many
+-- arguments, redundant tests and confusing messages.
+
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+-- The reversesub is a special case, which is why we need to store the replacements
+-- in a somewhat weird way: there is no separate lookup and the replacement comes from
+-- the rule itself. It is meant mostly for dealing with Urdu.
+
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char = start.char
+ local replacement = replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char = replacement
+ return head, start, true
+ else
+ return head, start, false
+ end
+end
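+
+-- Because there is no separate lookup, the data stored for a reverse chain rule is just
+-- a char -> char mapping, for instance (made-up slots)
+--
+-- replacements = { [0x0644] = 0xFEDF }
+--
+-- so the handler above only has to look up start.char and swap it in place.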
+
+--[[ldx--
+<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
+applied: single, alternate, multiple or ligature, where ligature can be an invalid
+one in the sense that it replaces multiple glyphs by one, but not necessarily one that
+looks like the combination (i.e. it is then the counterpart of multiple). For
+example, the following is valid:</p>
+
+<typing>
+<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
+</typing>
+
+<p>Therefore we don't really do the replacement here already unless we have the
+single lookup case. The efficiency of the replacements can be improved by deleting
+as little as needed, but that would also make the code even messier.</p>
+--ldx]]--
+
+-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
+-- local n = 1
+-- if start == stop then
+-- -- done
+-- elseif ignoremarks then
+-- repeat -- start x x m x x stop => start m
+-- local next = start.next
+-- if not marks[next.char] then
+-- local components = next.components
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- end
+-- n = n + 1
+-- until next == stop
+-- else -- start x x x stop => start
+-- repeat
+-- local next = start.next
+-- local components = next.components
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- n = n + 1
+-- until next == stop
+-- end
+-- return head, n
+-- end
+
+--[[ldx--
+<p>Here we replace start by a single variant. First we delete the rest of the
+match.</p>
+--ldx]]--
+
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ -- todo: marks ?
+ local current = start
+ local subtables = currentlookup.subtables
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id == glyph_code then
+ local currentchar = current.char
+ local lookupname = subtables[1] -- only 1
+ local replacement = lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement = replacement[currentchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char = replacement
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_single = chainprocs.gsub_single
+
+--[[ldx--
+<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
+the match.</p>
+--ldx]]--
+
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ -- local head, n = delete_till_stop(head,start,stop)
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local replacements = lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements = replacements[startchar]
+        if not replacements or replacements == "" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_multiple = chainprocs.gsub_multiple
+
+--[[ldx--
+<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
+--ldx]]--
+
+-- char_1 mark_1 -> char_x mark_1 (ignore marks)
+-- char_1 mark_1 -> char_x
+
+-- to be checked: do we always have just one glyph?
+-- we can also have alternates for marks
+-- marks come last anyway
+-- are there cases where we need to delete the mark
+
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current = start
+ local subtables = currentlookup.subtables
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id == glyph_code then -- is this check needed?
+ local currentchar = current.char
+ local lookupname = subtables[1]
+ local alternatives = lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives = alternatives[currentchar]
+ if alternatives then
+ local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+                            logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),choice,gref(choice),comment)
+ end
+ start.char = choice
+ else
+ if trace_alternatives then
+                            logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(currentchar),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_alternate = chainprocs.gsub_alternate
+
+--[[ldx--
+<p>When we replace ligatures we use a helper that handles the marks. I might change
+this function (move code inline and handle the marks by a separate function). We
+assume rather stupid ligatures (no complex disc nodes).</p>
+--ldx]]--
+
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local ligatures = lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures = ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s = start.next
+ local discfound = false
+ local last = stop
+ local nofreplacements = 0
+ local skipmark = currentlookup.flags[1]
+ while s do
+ local id = s.id
+ if id == disc_code then
+ s = s.next
+ discfound = true
+ else
+ local schar = s.char
+ if skipmark and marks[schar] then -- marks
+ s = s.next
+ else
+ local lg = ligatures[schar]
+ if lg then
+ ligatures, last, nofreplacements = lg, s, nofreplacements + 1
+ if s == stop then
+ break
+ else
+ s = s.next
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2 = ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop = last
+ end
+ if trace_ligatures then
+ if start == stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head, start, true, nofreplacements
+ elseif trace_bugs then
+ if start == stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ end
+ end
+ end
+ end
+ return head, start, false, 0
+end
+
+chainmores.gsub_ligature = chainprocs.gsub_ligature
+
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head, start, false
+ end
+ end
+ end
+ -- todo: like marks a ligatures hash
+ local index = start[a_ligacomp]
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+                                                cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ -- local alreadydone = markonce and start[a_markmark]
+ -- if not alreadydone then
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = base[a_ligacomp]
+ if blc and blc ~= slc then
+ base = base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+                            logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ -- elseif trace_marks and trace_details then
+ -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
+ -- end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone = cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local exitanchors = lookuphash[lookupname]
+ if exitanchors then
+ exitanchors = exitanchors[startchar]
+ end
+ if exitanchors then
+ local done = false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = nxt.next
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head, start, false
+ end
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ -- untested .. needs checking for the new model
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar] -- needed ?
+ if kerns then
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_single = chainprocs.gpos_single -- okay?
+
+-- when machines become faster I will make a shared function
+
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext = start.next
+ if snext then
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar]
+ if kerns then
+ local lookuptype = lookuptypes[lookupname]
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = snext.next
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a, b = krn[2], krn[6]
+ if a and a ~= 0 then
+ local k = setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b ~= 0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
+
+-- what pointer to return, spec says stop
+-- to be discussed ... is bidi changer a space?
+-- elseif char == zwnj and sequence[n][32] then -- brrr
+
+-- somehow l or f is global
+-- we don't need to pass the currentcontext, saves a bit
+-- make a slow variant then can be activated but with more tracing
+
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
+ local flags = sequence.flags
+ local done = false
+ local skipmark = flags[1]
+ local skipligature = flags[2]
+ local skipbase = flags[3]
+ local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
+ local markclass = sequence.markclass -- todo, first we need a proper test
+ local skipped = false
+ for k=1,#contexts do
+ local match = true
+ local current = start
+ local last = start
+ local ck = contexts[k]
+ local seq = ck[3]
+ local s = #seq
+ -- f..l = mid string
+ if s == 1 then
+ -- never happens
+ match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
+ else
+ -- maybe we need a better space check (maybe check for glue or category or combination)
+ -- we cannot optimize for n=2 because there can be disc nodes
+ local f, l = ck[4], ck[5]
+ -- current match
+ if f == 1 and f == l then -- current only
+ -- already a hit
+ -- match = true
+ else -- before/current/after | before/current | current/after
+ -- no need to test first hit (to be optimized)
+ if f == l then -- new, else last out of sync (f is > 1)
+ -- match = true
+ else
+ local n = f + 1
+ last = last.next
+ while n <= l do
+ if last then
+ local id = last.id
+ if id == glyph_code then
+ if last.font == currentfont and last.subtype<256 then
+ local char = last.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last = last.next
+ elseif seq[n][char] then
+ if n < l then
+ last = last.next
+ end
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ last = last.next
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- before
+ if match and f > 1 then
+ local prev = start.prev
+ if prev then
+ local n = f-1
+ while n >= 1 do
+ if prev then
+ local id = prev.id
+ if id == glyph_code then
+ if prev.font == currentfont and prev.subtype<256 then -- normal char
+ local char = prev.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip 'm
+ elseif seq[n][32] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ prev = prev.prev
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
+ n = n -1
+ else
+ match = false
+ break
+ end
+ end
+ elseif f == 2 then
+ match = seq[1][32]
+ else
+                    for n=f-1,1,-1 do
+ if not seq[n][32] then
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- after
+ if match and s > l then
+ local current = last and last.next
+ if current then
+ -- removed optimization for s-l == 1, we have to deal with marks anyway
+ local n = l + 1
+ while n <= s do
+ if current then
+ local id = current.id
+ if id == glyph_code then
+ if current.font == currentfont and current.subtype<256 then -- normal char
+ local char = current.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip 'm
+ elseif seq[n][32] then -- brrr
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ current = current.next
+ elseif seq[n][32] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ end
+ elseif s-l == 1 then
+ match = seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match = false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ -- ck == currentcontext
+ if trace_contexts then
+ local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
+ local char = start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups = ck[6]
+ if chainlookups then
+ local nofchainlookups = #chainlookups
+ -- we can speed this up if needed
+ if nofchainlookups == 1 then
+ local chainlookupname = chainlookups[1]
+ local chainlookup = lookuptable[chainlookupname]
+ if chainlookup then
+ local cp = chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done = true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else -- shouldn't happen
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i = 1
+ repeat
+ if skipped then
+ while true do
+ local char = start.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ start = start.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname = chainlookups[i]
+ local chainlookup = lookuptable[chainlookupname]
+ if not chainlookup then
+ -- okay, n matches, < n replacements
+ i = i + 1
+ else
+ local cp = chainmores[chainlookup.type]
+ if not cp then
+ -- actually an error
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i = i + 1
+ else
+ local ok, n
+ head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ -- messy since last can be changed !
+ if ok then
+ done = true
+ -- skip next one(s) if ligature
+ i = i + (n or 1)
+ else
+ i = i + 1
+ end
+ end
+ end
+ if start then
+ start = start.next
+ else
+ -- weird
+ end
+ until i > nofchainlookups
+ end
+ else
+ local replacements = ck[7]
+ if replacements then
+ head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
+ else
+ done = true -- can be meant to be skipped
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head, start, done
+end
+
+-- Because we want to keep this elsewhere (and because speed is less of an issue) we
+-- pass the font id so that the verbose variant can access the relevant helper tables.
+
+local verbose_handle_contextchain = function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+
+otf.chainhandlers = {
+ normal = normal_handle_contextchain,
+ verbose = verbose_handle_contextchain,
+}
+
+function otf.setcontextchain(method)
+ if not method or method == "normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then -- no need for a message while making the format
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain = normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler = otf.chainhandlers[method]
+ handlers.contextchain = function(...)
+ return handler(currentfont,...) -- hm, get rid of ...
+ end
+ end
+ handlers.gsub_context = handlers.contextchain
+ handlers.gsub_contextchain = handlers.contextchain
+ handlers.gsub_reversecontextchain = handlers.contextchain
+ handlers.gpos_contextchain = handlers.contextchain
+ handlers.gpos_context = handlers.contextchain
+end
+
+otf.setcontextchain()
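+
+-- Usage sketch: a wrapper that wants more tracing can switch handlers with
+--
+-- otf.setcontextchain("verbose")
+--
+-- while calling it without an argument, as done above, (re)installs the normal one.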
+
+local missing = { } -- we only report once
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+
+local logwarning = report_process
+
+local function report_missing_cache(typ,lookup)
+ local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
+ local t = f[typ] if not t then t = { } f[typ] = t end
+ if not t[lookup] then
+ t[lookup] = true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+
+local resolved = { } -- we only resolve a font,script,language pair once
+
+-- todo: pass all these 'locals' in a table
+
+local lookuphashes = { }
+
+setmetatableindex(lookuphashes, function(t,font)
+ local lookuphash = fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash = false
+ end
+ t[font] = lookuphash
+ return lookuphash
+end)
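+
+-- Thanks to the index metamethod the first access per font does the resolving and later
+-- accesses hit the cache; false is stored too, so fonts without lookups are not
+-- re-examined:
+--
+-- local lookuphash = lookuphashes[font] -- a table of lookup data, or false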
+
+-- fonts.hashes.lookups = lookuphashes
+
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local function initialize(sequence,script,language,enabled)
+ local features = sequence.features
+ if features then
+ local order = sequence.order
+ if order then
+ for i=1,#order do
+ local kind = order[i] --
+ local valid = enabled[kind]
+ if valid then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ end
+ end
+ end
+ else
+ -- can't happen
+ end
+ end
+ return false
+end
+
+function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
+ local shared = tfmdata.shared
+ local properties = tfmdata.properties
+ local language = properties.language or "dflt"
+ local script = properties.script or "dflt"
+ local enabled = shared.features
+ local res = resolved[font]
+ if not res then
+ res = { }
+ resolved[font] = res
+ end
+ local rs = res[script]
+ if not rs then
+ rs = { }
+ res[script] = rs
+ end
+ local rl = rs[language]
+ if not rl then
+ rl = {
+ -- indexed but we can also add specific data by key
+ }
+ rs[language] = rl
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
+ end
+ return rl
+end
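+
+-- Each entry of the returned (and cached) list has the shape produced by 'initialize'
+-- above, roughly
+--
+-- { featurevalue, analyzer_attribute_or_false, chain, kind, sequence }
+--
+-- which is what featuresprocessor below unpacks as dataset[1] .. dataset[5].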
+
+-- elseif id == glue_code then
+-- if p[5] then -- chain
+-- local pc = pp[32]
+-- if pc then
+-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
+-- if ok then
+-- done = true
+-- end
+-- if start then start = start.next end
+-- else
+-- start = start.next
+-- end
+-- else
+-- start = start.next
+-- end
+
+-- there will be a new direction parser (pre-parsed etc)
+
+-- less bytecode: 290 -> 254
+--
+-- attr = attr or false
+--
+-- local a = getattr(start,0)
+-- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then
+-- -- the action
+-- end
+
+local function featuresprocessor(head,font,attr)
+
+ local lookuphash = lookuphashes[font] -- we can also check sequences here
+
+ if not lookuphash then
+ return head, false
+ end
+
+ if trace_steps then
+ checkstep(head)
+ end
+
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions
+ characters = tfmdata.characters
+ resources = tfmdata.resources
+
+ marks = resources.marks
+ anchorlookups = resources.lookup_to_anchor
+ lookuptable = resources.lookups
+ lookuptypes = resources.lookuptypes
+
+ currentfont = font
+ rlmode = 0
+
+ local sequences = resources.sequences
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
+
+ local dirstack = { } -- could move outside function
+
+ -- We could work on sub start-stop ranges instead but I wonder if there is that
+    -- much speed gain (experiments showed that it did not make much sense) and we need
+ -- to keep track of directions anyway. Also at some point I want to play with
+ -- font interactions and then we do need the full sweeps.
+
+ -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
+    -- so that multiple cases are also covered).
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ local typ = sequence.type
+ local subtables = sequence.subtables
+ if chain < 0 then
+ -- this is a limited case, no special treatments like 'init' etc
+ local handler = handlers[typ]
+ -- we need to get rid of this slide! probably no longer needed in latest luatex
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.prev end
+ else
+ start = start.prev
+ end
+ else
+ start = start.prev
+ end
+ else
+ start = start.prev
+ end
+ end
+ else
+ local handler = handlers[typ]
+ local ns = #subtables
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
+ if ns == 1 then -- happens often
+ local lookupname = subtables[1]
+ local lookupcache = lookuphash[lookupname]
+ if not lookupcache then -- also check for empty cache
+ report_missing_cache(typ,lookupname)
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+                                        -- sequence can go
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = disc.prev
+ local next = disc.next
+ if prev and next then
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
+ if a then
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ else
+ a = not attribute or prev[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[prev.char]
+ if lookupmatch then
+                                    -- sequence can go
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ prev.next = disc
+ -- next.prev = disc
+ end
+ return next
+ end
+
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+                                        -- sequence can go
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success = true
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if start.subtype == discretionary_code then
+ local pre = start.pre
+ if pre then
+ local new = subrun(pre)
+ if new then start.pre = new end
+ end
+ local post = start.post
+ if post then
+ local new = subrun(post)
+ if new then start.post = new end
+ end
+ local replace = start.replace
+ if replace then
+ local new = subrun(replace)
+ if new then start.replace = new end
+ end
+                            elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = start.next
+ elseif id == whatsit_code then -- will be function
+ local subtype = start.subtype
+ if subtype == dir_code then
+ local dir = start.dir
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = start.dir
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ -- one might wonder if the par dir should be looked at, so we might as well drop the next line
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
+ else
+ start = start.next
+ end
+ end
+ end
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = start.id
+            if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = disc.prev
+ local next = disc.next
+ if prev and next then
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
+ if a then
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ else
+ a = not attribute or prev[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[prev.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ prev.next = disc
+ -- next.prev = disc
+ end
+ return next
+ end
+
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if start.subtype == discretionary_code then
+ local pre = start.pre
+ if pre then
+ local new = subrun(pre)
+ if new then start.pre = new end
+ end
+ local post = start.post
+ if post then
+ local new = subrun(post)
+ if new then start.post = new end
+ end
+ local replace = start.replace
+ if replace then
+ local new = subrun(replace)
+ if new then start.replace = new end
+ end
+                elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = start.next
+ elseif id == whatsit_code then
+ local subtype = start.subtype
+ if subtype == dir_code then
+ local dir = start.dir
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = start.dir
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
+ else
+ start = start.next
+ end
+ end
+ end
+ end
+ if success then
+ done = true
+ end
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+ end
+ return head, done
+end
+
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if target then
+ target[unicode] = lookupdata
+ else
+ lookuphash[lookupname] = { [unicode] = lookupdata }
+ end
+end
+
+local action = {
+
+ substitution = generic,
+ multiple = generic,
+ alternate = generic,
+ position = generic,
+
+ ligature = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ for i=1,#lookupdata do
+ local li = lookupdata[i]
+ local tu = target[li]
+ if not tu then
+ tu = { }
+ target[li] = tu
+ end
+ target = tu
+ end
+ target.ligature = unicode
+ end,
+
+ pair = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ local others = target[unicode]
+ local paired = lookupdata[1]
+ if others then
+ others[paired] = lookupdata
+ else
+ others = { [paired] = lookupdata }
+ target[unicode] = others
+ end
+ end,
+
+}
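+
+-- A small illustration (with made-up codepoints) of the tables built above: feeding the
+-- ligature components { 0x66, 0x69 } for the ligature unicode 0xFB01 through the
+-- ligature action yields
+--
+--   lookuphash[lookupname] = { [0x66] = { [0x69] = { ligature = 0xFB01 } } }
+--
+-- while the generic action stores its data directly under the unicode and the pair
+-- action adds one extra level keyed by the second glyph of the pair.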
+
+local function prepare_lookups(tfmdata)
+
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local anchor_to_lookup = resources.anchor_to_lookup
+ local lookup_to_anchor = resources.lookup_to_anchor
+ local lookuptypes = resources.lookuptypes
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+
+    -- we cannot free the entries in the descriptions as sometimes we access
+    -- them directly (for instance anchors) ... selectively freeing doesn't save
+    -- much memory as it's only a reference to a table and the slot in the
+    -- description hash is not freed anyway
+
+ for unicode, character in next, characters do -- we cannot loop over descriptions !
+
+ local description = descriptions[unicode]
+
+ if description then
+
+ local lookups = description.slookups
+ if lookups then
+ for lookupname, lookupdata in next, lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+
+ local lookups = description.mlookups
+ if lookups then
+ for lookupname, lookuplist in next, lookups do
+ local lookuptype = lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata = lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+
+ local list = description.kerns
+ if list then
+ for lookup, krn in next, list do -- ref to glyph, saves lookup
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = krn
+ else
+ lookuphash[lookup] = { [unicode] = krn }
+ end
+ end
+ end
+
+ local list = description.anchors
+ if list then
+ for typ, anchors in next, list do -- types
+ if typ == "mark" or typ == "cexit" then -- or entry?
+ for name, anchor in next, anchors do
+ local lookups = anchor_to_lookup[name]
+ if lookups then
+ for lookup, _ in next, lookups do
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = anchors
+ else
+ lookuphash[lookup] = { [unicode] = anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ end
+
+ end
+
+end
+
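+-- builds a mapping from each original glyph to its replacement: result[original[i]] = replacement[i]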
+local function split(replacement,original)
+ local result = { }
+ for i=1,#replacement do
+ result[original[i]] = replacement[i]
+ end
+ return result
+end
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true, contextsub = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
+
+local function prepare_contextchains(tfmdata)
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local lookups = rawdata.lookups
+ if lookups then
+ for lookupname, lookupdata in next, rawdata.lookups do
+ local lookuptype = lookupdata.type
+ if lookuptype then
+ local rules = lookupdata.rules
+ if rules then
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but i need to see what use it has)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
+ end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+                    -- Eventually we could store start, stop and sequence in the cached file,
+                    -- but then less sharing takes place, so it's best not to do that without
+                    -- a lot of profiling; let's forget about it for now.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
+ end
+ end
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
+ end
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
+ end
+ end
+ if sequence[1] then
+ -- Replacements only happen with reverse lookups as they are single only. We
+ -- could pack them into current (replacement value instead of true) and then
+ -- use sequence[start] instead but it's somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
+ end
+ end
+ end
+ end
+ end
+ else
+ -- no rules
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
+ end
+ end
+ end
+end
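+
+-- For reference, each rule prepared above ends up as a positional tuple
+--
+--   { rule number, lookuptype, sequence, start, stop, rule.lookups, replacements }
+--
+-- and contexts[unic] points at the shared rule list for every unicode in sequence[start]
+-- that has no entry yet.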
+
+-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ -- beware we need to use the topmost properties table
+ local rawdata = tfmdata.shared.rawdata
+ local properties = rawdata.properties
+ if not properties.initialized then
+ local starttime = trace_preparing and os.clock()
+ local resources = rawdata.resources
+ resources.lookuphash = resources.lookuphash or { }
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized = true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ position = 1,
+ node = featuresinitializer,
+ },
+ processors = {
+ node = featuresprocessor,
+ }
+}
+
+-- This can be used for extra handlers, but should be used with care!
+
+otf.handlers = handlers
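
The handler table exposed above can be extended; here is a minimal sketch of what an
extra handler could look like, going by the call sites in this file (in: head, start,
kind, lookupname, lookupmatch, sequence, lookuphash, subtable index; out: head, start,
ok). The lookup type name "x_demo_extra" is purely hypothetical, and the sketch assumes
the fonts.handlers.otf namespace set up by the loaded font modules:

    local otf = fonts.handlers.otf

    otf.handlers.x_demo_extra = function(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
        -- inspect or replace the glyph node 'start' here; report whether anything was done
        local done = false
        return head, start, done
    end
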
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua
index ea6e3cab5ee..f03d558bfb4 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua
@@ -100,3 +100,7 @@ fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv
return ""
end
+
+function fonts.names.ignoredfile(filename) -- only supported in mkiv
+ return false -- will be overloaded
+end
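
A minimal sketch of how a font name database could overload the stub above; the
filename pattern is purely illustrative:

    function fonts.names.ignoredfile(filename)
        -- skip backup files when scanning for font names
        return string.find(string.lower(filename), "%.bak$") ~= nil
    end
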
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
index 89592fcacf8..5e5c9a4cfa6 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
@@ -192,7 +192,7 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
     -- with context. The mtx-fonts script can be used to generate this file (using the --names option).
-- In 2013/14 I will merge/move some generic files into luatex-fonts-* files (copies) so that
- -- intermediate updates of context not interfere. We can then also use the general merger and
+ -- intermediate updates of context don't interfere. We can then also use the general merger and
-- consider stripping debug code.
loadmodule('font-ini.lua')
@@ -201,15 +201,19 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-cid.lua')
loadmodule('font-map.lua') -- for loading lum file (will be stripped)
loadmodule('luatex-fonts-syn.lua') -- deals with font names (synonyms)
+ -- begin of test
+ loadmodule('font-tfm.lua') -- optional
+ loadmodule('font-afm.lua') -- optional
+ loadmodule('font-afk.lua') -- optional
+ -- end of test
loadmodule('luatex-fonts-tfm.lua')
loadmodule('font-oti.lua')
loadmodule('font-otf.lua')
loadmodule('font-otb.lua')
- loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
+ loadmodule('luatex-fonts-inj.lua') -- will be replaced (luatex >= .80)
loadmodule('font-ota.lua')
- loadmodule('font-otn.lua')
+ loadmodule('luatex-fonts-otn.lua')
loadmodule('font-otp.lua') -- optional
- ----------('luatex-fonts-chr.lua')
loadmodule('luatex-fonts-lua.lua')
loadmodule('font-def.lua')
loadmodule('luatex-fonts-def.lua')
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex
index a7c8bc2b8fd..7b457e9b4fc 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex
@@ -132,7 +132,9 @@
%D and the \CONTEXT\ wiki.
\directlua {
- dofile(kpse.find_file("luatex-fonts.lua","tex"))
+ if not fonts then
+ dofile(kpse.find_file("luatex-fonts.lua","tex"))
+ end
}
\endinput
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
index fcc837e7041..fbf8ce3cf2c 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
@@ -80,4 +80,8 @@ $$\left( { {1} \over { {1} \over {x} } } \right) $$
$$\sqrt {2} { { {1} \over { {1} \over {x} } } } $$
+\font\cows=file:koeieletters.afm at 50pt
+
+\cows Hello World!
+
\end