Diffstat (limited to 'Master/texmf-dist/tex/generic/context')
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex-fonts-merged.lua  4193
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex-fonts.lua            2
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex-mplib.lua            6
-rw-r--r--  Master/texmf-dist/tex/generic/context/luatex-test.tex             6
-rw-r--r--  Master/texmf-dist/tex/generic/context/m-ch-en.tex                 4
5 files changed, 2679 insertions, 1532 deletions
diff --git a/Master/texmf-dist/tex/generic/context/luatex-fonts-merged.lua b/Master/texmf-dist/tex/generic/context/luatex-fonts-merged.lua
index 3834afce5d3..da81735ff23 100644
--- a/Master/texmf-dist/tex/generic/context/luatex-fonts-merged.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex-fonts-merged.lua
@@ -1,18 +1,21 @@
--- merged file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts-merged.lua
--- parent file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts.lua
--- merge date : 08/19/09 17:12:50
+-- merged file : luatex-fonts-merged.lua
+-- parent file : luatex-fonts.lua
+-- merge date : 05/24/10 13:05:12
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['l-string'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local sub, gsub, find, match, gmatch, format, char, byte, rep = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep
+local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
+local lpegmatch = lpeg.match
+
+-- some functions may disappear as they are not used anywhere
if not string.split then
@@ -52,8 +55,16 @@ function string:unquote()
return (gsub(self,"^([\"\'])(.*)%1$","%2"))
end
+--~ function string:unquote()
+--~ if find(self,"^[\'\"]") then
+--~ return sub(self,2,-2)
+--~ else
+--~ return self
+--~ end
+--~ end
+
function string:quote() -- we could use format("%q")
- return '"' .. self:unquote() .. '"'
+ return format("%q",self)
end
function string:count(pattern) -- variant 3
@@ -73,12 +84,23 @@ function string:limit(n,sentinel)
end
end
-function string:strip()
- return (gsub(self,"^%s*(.-)%s*$", "%1"))
+--~ function string:strip() -- the .- is quite efficient
+--~ -- return match(self,"^%s*(.-)%s*$") or ""
+--~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list
+--~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)')
+--~ end
+
+do -- roberto's variant:
+ local space = lpeg.S(" \t\v\n")
+ local nospace = 1 - space
+ local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0)
+ function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+ end
end
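-- An illustrative sketch (sample strings invented, not part of this merge):
-- the lpeg based strip above removes leading and trailing whitespace while
-- keeping inner spacing intact.
--~ print(string.strip("  hello  world  "))  -- "hello  world"
--~ print(string.strip(" \t\n "))            -- ""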
function string:is_empty()
- return not find(find,"%S")
+ return not find(self,"%S")
end
function string:enhance(pattern,action)
@@ -112,14 +134,14 @@ if not string.characters then
local function nextchar(str, index)
index = index + 1
- return (index <= #str) and index or nil, str:sub(index,index)
+ return (index <= #str) and index or nil, sub(str,index,index)
end
function string:characters()
return nextchar, self, 0
end
local function nextbyte(str, index)
index = index + 1
- return (index <= #str) and index or nil, byte(str:sub(index,index))
+ return (index <= #str) and index or nil, byte(sub(str,index,index))
end
function string:bytes()
return nextbyte, self, 0
@@ -132,7 +154,7 @@ end
function string:rpadd(n,chr)
local m = n-#self
if m > 0 then
- return self .. self.rep(chr or " ",m)
+ return self .. rep(chr or " ",m)
else
return self
end
@@ -141,7 +163,7 @@ end
function string:lpadd(n,chr)
local m = n-#self
if m > 0 then
- return self.rep(chr or " ",m) .. self
+ return rep(chr or " ",m) .. self
else
return self
end
@@ -189,6 +211,17 @@ function string:pattesc()
return (gsub(self,".",patterns_escapes))
end
+local simple_escapes = {
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+function string:simpleesc()
+ return (gsub(self,".",simple_escapes))
+end
+
function string:tohash()
local t = { }
for s in gmatch(self,"([^, ]+)") do -- lpeg
@@ -200,10 +233,10 @@ end
local pattern = lpeg.Ct(lpeg.C(1)^0)
function string:totable()
- return pattern:match(self)
+ return lpegmatch(pattern,self)
end
---~ for _, str in ipairs {
+--~ local t = {
--~ "1234567123456712345671234567",
--~ "a\tb\tc",
--~ "aa\tbb\tcc",
@@ -211,7 +244,10 @@ end
--~ "aaaa\tbbbb\tcccc",
--~ "aaaaa\tbbbbb\tccccc",
--~ "aaaaaa\tbbbbbb\tcccccc",
---~ } do print(string.tabtospace(str)) end
+--~ }
+--~ for k,v do
+--~ print(string.tabtospace(t[k]))
+--~ end
function string.tabtospace(str,tab)
-- we don't handle embedded newlines
@@ -219,7 +255,7 @@ function string.tabtospace(str,tab)
local s = find(str,"\t")
if s then
if not tab then tab = 7 end -- only when found
- local d = tab-(s-1)%tab
+ local d = tab-(s-1) % tab
if d > 0 then
str = gsub(str,"\t",rep(" ",d),1)
else
@@ -238,64 +274,89 @@ function string:compactlong() -- strips newlines and leading spaces
return self
end
+function string:striplong() -- strips newlines and leading spaces
+ self = gsub(self,"^%s*","")
+ self = gsub(self,"[\n\r]+ *","\n")
+ return self
+end
+
+function string:topattern(lowercase,strict)
+ if lowercase then
+ self = lower(self)
+ end
+ self = gsub(self,".",simple_escapes)
+ if self == "" then
+ self = ".*"
+ elseif strict then
+ self = "^" .. self .. "$"
+ end
+ return self
+end
+
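-- An illustrative sketch (sample strings invented) of the wildcard helpers
-- above: simpleesc escapes magic characters and maps ? and * to their Lua
-- pattern equivalents, topattern additionally lowercases and anchors on request.
--~ print(("luat-*.mkiv"):simpleesc())     -- luat%-.*%.mkiv
--~ print(("*.LUA"):topattern(true,true))  -- ^.*%.lua$
--~ print(("text?.tex"):topattern())       -- text.%.tex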
end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['l-lpeg'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-
---~ l-lpeg.lua :
-
---~ lpeg.digit = lpeg.R('09')^1
---~ lpeg.sign = lpeg.S('+-')^1
---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1)
---~ lpeg.number = lpeg.float + lpeg.integer
---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1
---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1
---~ lpeg.uppercase = lpeg.P("AZ")
---~ lpeg.lowercase = lpeg.P("az")
-
---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed
---~ lpeg.space = lpeg.S(' ')^1
---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1
---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1
---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1
-
-local hash = { }
+local lpeg = require("lpeg")
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V
+local match = lpeg.match
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local utf8byte = R("\128\191")
+
+patterns.utf8byte = utf8byte
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8byte
+patterns.utf8three = R("\224\239") * utf8byte * utf8byte
+patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = S(" ")
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = crlf + cr + lf
+patterns.nonspace = 1 - patterns.space
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * lpeg.V(1) }
-end
-
-function lpeg.startswith(pattern) --slightly adapted
- return P(pattern)
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
end
function lpeg.splitter(pattern, action)
return (((1-P(pattern))^1)/action+1)^0
end
--- variant:
-
---~ local parser = lpeg.Ct(lpeg.splitat(newline))
-
-local crlf = P("\r\n")
-local cr = P("\r")
-local lf = P("\n")
-local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-local newline = crlf + cr + lf
-local spacing = space^0 * newline
-
+local spacing = patterns.spacer^0 * patterns.newline -- sort of strip
local empty = spacing * Cc("")
local nonempty = Cs((1-spacing)^1) * spacing^-1
local content = (empty + nonempty)^1
@@ -303,15 +364,15 @@ local content = (empty + nonempty)^1
local capture = Ct(content^0)
function string:splitlines()
- return capture:match(self)
+ return match(capture,self)
end
-lpeg.linebyline = content -- better make a sublibrary
+patterns.textline = content
---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more
---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more
---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps
---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps
+--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
+--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
+--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps
+--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps
local splitters_s, splitters_m = { }, { }
@@ -321,7 +382,7 @@ local function splitat(separator,single)
separator = P(separator)
if single then
local other, any = C((1 - separator)^0), P(1)
- splitter = other * (separator * C(any^0) + "")
+ splitter = other * (separator * C(any^0) + "") -- ?
splitters_s[separator] = splitter
else
local other = C((1 - separator)^0)
@@ -336,22 +397,79 @@ lpeg.splitat = splitat
local cache = { }
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = Ct(splitat(separator))
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
function string:split(separator)
local c = cache[separator]
if not c then
c = Ct(splitat(separator))
cache[separator] = c
end
- return c:match(self)
+ return match(c,self)
+end
+
+lpeg.splitters = cache
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string:checkedsplit(separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,self)
end
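-- An illustrative sketch (sample strings invented): lpeg.split keeps empty
-- fields while checkedsplit collapses runs of separators, which is what the
-- path splitting further on relies on.
--~ print(#lpeg.split(",","a,,b"))         -- 3  i.e. { "a", "", "b" }
--~ print(#lpeg.checkedsplit(",","a,,b"))  -- 2  i.e. { "a", "b" }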
+--~ function lpeg.append(list,pp)
+--~ local p = pp
+--~ for l=1,#list do
+--~ if p then
+--~ p = p + P(list[l])
+--~ else
+--~ p = P(list[l])
+--~ end
+--~ end
+--~ return p
+--~ end
+
+--~ from roberto's site:
+
+local f1 = string.byte
+
+local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['l-boolean'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -411,7 +529,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['l-math'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -457,7 +575,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -466,9 +584,58 @@ if not modules then modules = { } end modules ['l-table'] = {
table.join = table.concat
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump
+local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
-local type, next, tostring, ipairs = type, next, tostring, ipairs
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+
+-- Starting with version 5.2 Lua no longer provides ipairs, which makes
+-- sense. As we already used the for loop and # in most places, the
+-- impact on ConTeXt was not that large; the remaining ipairs have
+-- already been replaced. In a similar fashion we also hardly used pairs.
+--
+-- Just in case, we provide the fallbacks as discussed in Programming
+-- in Lua (http://www.lua.org/pil/7.3.html):
+
+if not ipairs then
+
+ -- for k, v in ipairs(t) do ... end
+ -- for k=1,#t do local v = t[k] ... end
+
+ local function iterate(a,i)
+ i = i + 1
+ local v = a[i]
+ if v ~= nil then
+ return i, v --, nil
+ end
+ end
+
+ function ipairs(a)
+ return iterate, a, 0
+ end
+
+end
+
+if not pairs then
+
+ -- for k, v in pairs(t) do ... end
+ -- for k, v in next, t do ... end
+
+ function pairs(t)
+ return next, t -- , nil
+ end
+
+end
+
+-- Also, unpack has been moved to the table table, and for compatibility
+-- reasons we provide both now.
+
+if not table.unpack then
+ table.unpack = _G.unpack
+elseif not unpack then
+ _G.unpack = table.unpack
+end
+
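-- An illustrative sketch (not part of the merge) of the compatibility layer
-- above: both spellings keep working, whichever Lua version loads this file.
--~ local t = { "a", "b", "c" }
--~ for i, v in ipairs(t) do print(i,v) end  -- 1 a, 2 b, 3 c
--~ print(table.unpack(t))                   -- a b c (same as unpack(t))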
+-- extra functions, some might go (when not used)
function table.strip(tab)
local lst = { }
@@ -483,6 +650,14 @@ function table.strip(tab)
return lst
end
+function table.keys(t)
+ local k = { }
+ for key, _ in next, t do
+ k[#k+1] = key
+ end
+ return k
+end
+
local function compare(a,b)
return (tostring(a) < tostring(b))
end
@@ -526,7 +701,7 @@ end
table.sortedkeys = sortedkeys
table.sortedhashkeys = sortedhashkeys
-function table.sortedpairs(t)
+function table.sortedhash(t)
local s = sortedhashkeys(t) -- maybe just sortedkeys
local n = 0
local function kv(s)
@@ -537,6 +712,8 @@ function table.sortedpairs(t)
return kv, s
end
+table.sortedpairs = table.sortedhash
+
function table.append(t, list)
for _,v in next, list do
insert(t,v)
@@ -645,7 +822,7 @@ end
table.fastcopy = fastcopy
table.copy = copy
--- rougly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
+-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
function table.sub(t,i,j)
return { unpack(t,i,j) }
@@ -659,18 +836,18 @@ end
-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-function table.is_empty(t)
+function table.is_empty(t) -- obsolete, use inline code instead
return not t or not next(t)
end
-function table.one_entry(t)
+function table.one_entry(t) -- obsolete, use inline code instead
local n = next(t)
return n and not next(t,n)
end
-function table.starts_at(t)
- return ipairs(t,1)(t,0)
-end
+--~ function table.starts_at(t) -- obsolete, not nice anyway
+--~ return ipairs(t,1)(t,0)
+--~ end
function table.tohash(t,value)
local h = { }
@@ -748,6 +925,8 @@ end
--
-- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
+-- problem: there is no good number_to_string converter with the best resolution
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -770,8 +949,9 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s{",depth))
end
end
+ -- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here
+ local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
if compact then
-- NOT: for k=1,#root do (we need to quit at nil)
for k,v in ipairs(root) do -- can we use next?
@@ -792,10 +972,10 @@ local function do_serialize(root,name,depth,level,indexed)
if hexify then
handle(format("%s 0x%04X,",depth,v))
else
- handle(format("%s %s,",depth,v))
+ handle(format("%s %s,",depth,v)) -- %.99g
end
elseif t == "string" then
- if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then
+ if reduce and tonumber(v) then
handle(format("%s %s,",depth,v))
else
handle(format("%s %q,",depth,v))
@@ -832,29 +1012,29 @@ local function do_serialize(root,name,depth,level,indexed)
--~ if hexify then
--~ handle(format("%s %s=0x%04X,",depth,key(k),v))
--~ else
- --~ handle(format("%s %s=%s,",depth,key(k),v))
+ --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g
--~ end
if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
else
- handle(format("%s [%s]=%s,",depth,k,v))
+ handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
end
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
if hexify then
handle(format("%s %s=0x%04X,",depth,k,v))
else
- handle(format("%s %s=%s,",depth,k,v))
+ handle(format("%s %s=%s,",depth,k,v)) -- %.99g
end
else
if hexify then
handle(format("%s [%q]=0x%04X,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
end
end
elseif t == "string" then
- if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then
+ if reduce and tonumber(v) then
--~ handle(format("%s %s=%s,",depth,key(k),v))
if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
@@ -1063,7 +1243,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify)
end
end
-local function flatten(t,f,complete)
+local function flatten(t,f,complete) -- is this used? maybe a variant with next, ...
for i=1,#t do
local v = t[i]
if type(v) == "table" then
@@ -1092,6 +1272,24 @@ end
table.flatten_one_level = table.unnest
+-- a better one:
+
+local function flattened(t,f)
+ if not f then
+ f = { }
+ end
+ for k, v in next, t do
+ if type(v) == "table" then
+ flattened(v,f)
+ else
+ f[k] = v
+ end
+ end
+ return f
+end
+
+table.flattened = flattened
+
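-- An illustrative sketch (invented data): flattened merges nested hashes into
-- one level, keyed by the innermost keys.
--~ local t = table.flattened { a = 1, sub = { b = 2, subsub = { c = 3 } } }
--~ for k, v in next, t do print(k,v) end  -- a 1, b 2, c 3 (order not guaranteed)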
-- the next three may disappear
function table.remove_value(t,value) -- todo: n
@@ -1227,7 +1425,7 @@ function table.clone(t,p) -- t is optional or nil or table
elseif not t then
t = { }
end
- setmetatable(t, { __index = function(_,key) return p[key] end })
+ setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ?
return t
end
@@ -1255,21 +1453,36 @@ function table.reverse(t)
return tt
end
---~ function table.keys(t)
---~ local k = { }
---~ for k,_ in next, t do
---~ k[#k+1] = k
---~ end
---~ return k
---~ end
+function table.insert_before_value(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i,extra)
+ return
+ end
+ end
+ insert(t,1,extra)
+end
+
+function table.insert_after_value(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
---~ function table.keys_as_string(t)
---~ local k = { }
---~ for k,_ in next, t do
---~ k[#k+1] = k
---~ end
---~ return concat(k,"")
---~ end
end -- closure
@@ -1277,7 +1490,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['l-file'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -1288,14 +1501,17 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
local concat = table.concat
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
+local lpegmatch = lpeg.match
function file.removesuffix(filename)
return (gsub(filename,"%.[%a%d]+$",""))
end
function file.addsuffix(filename, suffix)
- if not find(filename,"%.[%a%d]+$") then
+ if not suffix or suffix == "" then
+ return filename
+ elseif not find(filename,"%.[%a%d]+$") then
return filename .. "." .. suffix
else
return filename
@@ -1318,20 +1534,39 @@ function file.nameonly(name)
return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.extname(name)
- return match(name,"^.+%.([^/\\]-)$") or ""
+function file.extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
file.suffix = file.extname
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
+--~ function file.join(...)
+--~ local pth = concat({...},"/")
+--~ pth = gsub(pth,"\\","/")
+--~ local a, b = match(pth,"^(.*://)(.*)$")
+--~ if a and b then
+--~ return a .. gsub(b,"//+","/")
+--~ end
+--~ a, b = match(pth,"^(//)(.*)$")
+--~ if a and b then
+--~ return a .. gsub(b,"//+","/")
+--~ end
+--~ return (gsub(pth,"//+","/"))
+--~ end
+
+local trick_1 = char(1)
+local trick_2 = "^" .. trick_1 .. "/+"
function file.join(...)
- local pth = concat({...},"/")
+ local lst = { ... }
+ local a, b = lst[1], lst[2]
+ if a == "" then
+ lst[1] = trick_1
+ elseif b and find(a,"^/+$") and find(b,"^/") then
+ lst[1] = ""
+ lst[2] = gsub(b,"^/+","")
+ end
+ local pth = concat(lst,"/")
pth = gsub(pth,"\\","/")
local a, b = match(pth,"^(.*://)(.*)$")
if a and b then
@@ -1341,17 +1576,28 @@ function file.join(...)
if a and b then
return a .. gsub(b,"//+","/")
end
+ pth = gsub(pth,trick_2,"")
return (gsub(pth,"//+","/"))
end
+--~ print(file.join("//","/y"))
+--~ print(file.join("/","/y"))
+--~ print(file.join("","/y"))
+--~ print(file.join("/x/","/y"))
+--~ print(file.join("x/","/y"))
+--~ print(file.join("http://","/y"))
+--~ print(file.join("http://a","/y"))
+--~ print(file.join("http:///a","/y"))
+--~ print(file.join("//nas-1","/y"))
+
function file.iswritable(name)
local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
- return a and a.permissions:sub(2,2) == "w"
+ return a and sub(a.permissions,2,2) == "w"
end
function file.isreadable(name)
local a = lfs.attributes(name)
- return a and a.permissions:sub(1,1) == "r"
+ return a and sub(a.permissions,1,1) == "r"
end
file.is_readable = file.isreadable
@@ -1359,36 +1605,50 @@ file.is_writable = file.iswritable
-- todo: lpeg
-function file.split_path(str)
- local t = { }
- str = gsub(str,"\\", "/")
- str = gsub(str,"(%a):([;/])", "%1\001%2")
- for name in gmatch(str,"([^;:]+)") do
- if name ~= "" then
- t[#t+1] = gsub(name,"\001",":")
- end
- end
- return t
+--~ function file.split_path(str)
+--~ local t = { }
+--~ str = gsub(str,"\\", "/")
+--~ str = gsub(str,"(%a):([;/])", "%1\001%2")
+--~ for name in gmatch(str,"([^;:]+)") do
+--~ if name ~= "" then
+--~ t[#t+1] = gsub(name,"\001",":")
+--~ end
+--~ end
+--~ return t
+--~ end
+
+local checkedsplit = string.checkedsplit
+
+function file.split_path(str,separator)
+ str = gsub(str,"\\","/")
+ return checkedsplit(str,separator or io.pathseparator)
end
function file.join_path(tab)
return concat(tab,io.pathseparator) -- can have trailing //
end
+-- we can hash them weakly
+
function file.collapse_path(str)
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
- end
- str = gsub(str,"([^/])/$","%1")
- str = gsub(str,"^%./","")
- str = gsub(str,"/%.$","")
+ str = gsub(str,"\\","/")
+ if find(str,"/") then
+ str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
+ str = gsub(str,"/%./","/")
+ local n, m = 1, 1
+ while n > 0 or m > 0 do
+ str, n = gsub(str,"[^/%.]+/%.%.$","")
+ str, m = gsub(str,"[^/%.]+/%.%./","")
+ end
+ str = gsub(str,"([^/])/$","%1")
+ -- str = gsub(str,"^%./","") -- ./xx in qualified
+ str = gsub(str,"/%.$","")
+ end
if str == "" then str = "." end
return str
end
+--~ print(file.collapse_path("/a"))
--~ print(file.collapse_path("a/./b/.."))
--~ print(file.collapse_path("a/aa/../b/bb"))
--~ print(file.collapse_path("a/../.."))
@@ -1418,27 +1678,27 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1
--~ function file.extname(name)
---~ return pattern:match(name) or ""
+--~ return lpegmatch(pattern,name) or ""
--~ end
--~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1)
--~ function file.removesuffix(name)
---~ return pattern:match(name)
+--~ return lpegmatch(pattern,name)
--~ end
--~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1
--~ function file.basename(name)
---~ return pattern:match(name) or name
+--~ return lpegmatch(pattern,name) or name
--~ end
--~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1
--~ function file.dirname(name)
---~ local p = pattern:match(name)
+--~ local p = lpegmatch(pattern,name)
--~ if p then
---~ return name:sub(1,p-2)
+--~ return sub(name,1,p-2)
--~ else
--~ return ""
--~ end
@@ -1447,7 +1707,7 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
--~ function file.addsuffix(name, suffix)
---~ local p = pattern:match(name)
+--~ local p = lpegmatch(pattern,name)
--~ if p then
--~ return name
--~ else
@@ -1458,9 +1718,9 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
--~ function file.replacesuffix(name,suffix)
---~ local p = pattern:match(name)
+--~ local p = lpegmatch(pattern,name)
--~ if p then
---~ return name:sub(1,p-2) .. "." .. suffix
+--~ return sub(name,1,p-2) .. "." .. suffix
--~ else
--~ return name .. "." .. suffix
--~ end
@@ -1469,11 +1729,11 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1
--~ function file.nameonly(name)
---~ local a, b = pattern:match(name)
+--~ local a, b = lpegmatch(pattern,name)
--~ if b then
---~ return name:sub(a,b-2)
+--~ return sub(name,a,b-2)
--~ elseif a then
---~ return name:sub(a)
+--~ return sub(name,a)
--~ else
--~ return name
--~ end
@@ -1507,11 +1767,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":")
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)
- return qualified:match(filename)
+ return lpegmatch(qualified,filename) ~= nil
end
function file.is_rootbased_path(filename)
- return rootbased:match(filename)
+ return lpegmatch(rootbased,filename) ~= nil
end
local slash = lpeg.S("\\/")
@@ -1524,29 +1784,38 @@ local base = lpeg.C((1-suffix)^0)
local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc(""))
function file.splitname(str) -- returns drive, path, base, suffix
- return pattern:match(str)
+ return lpegmatch(pattern,str)
end
--- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end
+-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
--
-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
-- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
+--~ -- todo:
+--~
+--~ if os.type == "windows" then
+--~ local currentdir = lfs.currentdir
+--~ function lfs.currentdir()
+--~ return (gsub(currentdir(),"\\","/"))
+--~ end
+--~ end
+
end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['l-io'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local byte = string.byte
+local byte, find, gsub = string.byte, string.find, string.gsub
if string.find(os.getenv("PATH"),";") then
io.fileseparator, io.pathseparator = "\\", ";"
@@ -1575,7 +1844,7 @@ function io.savedata(filename,data,joiner)
elseif type(data) == "function" then
data(f)
else
- f:write(data)
+ f:write(data or "")
end
f:close()
return true
@@ -1704,20 +1973,21 @@ function io.ask(question,default,options)
end
io.write(string.format(" "))
local answer = io.read()
- answer = answer:gsub("^%s*(.*)%s*$","%1")
+ answer = gsub(answer,"^%s*(.*)%s*$","%1")
if answer == "" and default then
return default
elseif not options then
return answer
else
- for _,v in pairs(options) do
- if v == answer then
+ for k=1,#options do
+ if options[k] == answer then
return answer
end
end
local pattern = "^" .. answer
- for _,v in pairs(options) do
- if v:find(pattern) then
+ for k=1,#options do
+ local v = options[k]
+ if find(v,pattern) then
return v
end
end
@@ -1744,11 +2014,21 @@ statistics = {
starttiming = dummyfunction,
stoptiming = dummyfunction,
}
+directives = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
trackers = {
register = dummyfunction,
enable = dummyfunction,
disable = dummyfunction,
}
+experiments = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
storage = {
register = dummyfunction,
shared = { },
@@ -1763,6 +2043,9 @@ tasks = {
appendaction = dummyfunction,
prependaction = dummyfunction,
}
+callbacks = {
+ register = function(n,f) return callback.register(n,f) end,
+}
-- we need to cheat a bit here
@@ -1775,13 +2058,14 @@ local remapper = {
ttf = "truetype fonts",
ttc = "truetype fonts",
dfont = "truetype dictionary",
- cid = "other text files", -- will become "cid files"
+ cid = "cid maps",
+ fea = "font feature files",
}
function resolvers.find_file(name,kind)
name = string.gsub(name,"\\","\/")
kind = string.lower(kind)
- return kpse.find_file(name,(kind and kind ~= "" and (remapper[kind] or kind)) or "tex")
+ return kpse.find_file(name,(kind and kind ~= "" and (remapper[kind] or kind)) or file.extname(name,"tex"))
end
function resolvers.findbinfile(name,kind)
@@ -1791,13 +2075,64 @@ function resolvers.findbinfile(name,kind)
return resolvers.find_file(name,(kind and remapper[kind]) or kind)
end
+-- Caches ... I will make a real stupid version some day when I'm in the
+-- mood. After all, the generic code does not need the more advanced
+-- ConTeXt features. Cached data is not shared between ConTeXt and other
+-- usage as I don't want any dependency at all. Also, ConTeXt might have
+-- different needs and tricks added.
+
+caches = { }
+
+--~ containers.usecache = true
+
+function caches.setpath(category,subcategory)
+ local root = kpse.var_value("TEXMFCACHE") or ""
+ if root == "" then
+ root = kpse.var_value("VARTEXMF") or ""
+ end
+ if root ~= "" then
+ root = file.join(root,category)
+ lfs.mkdir(root)
+ root = file.join(root,subcategory)
+ lfs.mkdir(root)
+ return lfs.isdir(root) and root
+ end
+end
+
+local function makefullname(path,name)
+ if path and path ~= "" then
+ name = "temp-" and name -- clash prevention
+ return file.addsuffix(file.join(path,name),"lua")
+ end
+end
+
+function caches.iswritable(path,name)
+ local fullname = makefullname(path,name)
+ return fullname and file.iswritable(fullname)
+end
+
+function caches.loaddata(path,name)
+ local fullname = makefullname(path,name)
+ if fullname then
+ local data = loadfile(fullname)
+ return data and data()
+ end
+end
+
+function caches.savedata(path,name,data)
+ local fullname = makefullname(path,name)
+ if fullname then
+ table.tofile(fullname,data,'return',false,true,false)
+ end
+end
+
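-- An illustrative sketch (category and file names invented) of the stripped
-- down generic cache above: resolve a writable path, then save and reload a
-- table.
--~ local path = caches.setpath("generic-demo","fonts")
--~ if path and caches.iswritable(path,"demo") then
--~     caches.savedata(path,"demo",{ version = 1 })
--~     local data = caches.loaddata(path,"demo")  -- { version = 1 }
--~ end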
end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['data-con'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -1808,8 +2143,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub
local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end)
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end)
--[[ldx--
<p>Once we found ourselves defining similar cache constructs
@@ -1873,7 +2206,7 @@ end
function containers.is_valid(container, name)
if name and name ~= "" then
local storage = container.storage[name]
- return storage and not table.is_empty(storage) and storage.cache_version == container.version
+ return storage and storage.cache_version == container.version
else
return false
end
@@ -1924,7 +2257,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['node-ini'] = {
version = 1.001,
- comment = "companion to node-ini.tex",
+ comment = "companion to node-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -2032,10 +2365,12 @@ local penalty = node.id('penalty')
local kern = node.id('kern')
local whatsit = node.id('whatsit')
-local traverse_id = node.traverse_id
-local traverse = node.traverse
-local free_node = node.free
-local remove_node = node.remove
+local traverse_id = node.traverse_id
+local traverse = node.traverse
+local free_node = node.free
+local remove_node = node.remove
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
function nodes.remove(head, current, free_too)
local t = current
@@ -2055,8 +2390,8 @@ function nodes.delete(head,current)
return nodes.remove(head,current,true)
end
-nodes.before = node.insert_before
-nodes.after = node.insert_after
+nodes.before = insert_node_before
+nodes.after = insert_node_after
-- we need to test this, as it might be fixed now
@@ -2096,21 +2431,31 @@ function nodes.after(h,c,n)
return n, n
end
-function nodes.replace(head,current,new)
- if current and next then
- local p, n = current.prev, current.next
- new.prev, new.next = p, n
- if p then
- p.next = new
- else
+-- local h, c = nodes.replace(head,current,new)
+-- local c = nodes.replace(false,current,new)
+-- local c = nodes.replace(current,new)
+
+function nodes.replace(head,current,new) -- no head returned if false
+ if not new then
+ head, current, new = false, head, current
+ end
+ local prev, next = current.prev, current.next
+ if next then
+ new.next, next.prev = next, new
+ end
+ if prev then
+ new.prev, prev.next = prev, new
+ end
+ if head then
+ if head == current then
head = new
end
- if n then
- n.prev = new
- end
free_node(current)
+ return head, new
+ else
+ free_node(current)
+ return new
end
- return head, current
end
-- will move
@@ -2136,7 +2481,7 @@ end
nodes.count = count
--- new
+-- new, will move
function attributes.ofnode(n)
local a = n.attr
@@ -2161,14 +2506,17 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['node-res'] = {
version = 1.001,
- comment = "companion to node-ini.tex",
+ comment = "companion to node-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
local gmatch, format = string.gmatch, string.format
-local copy_node, free_node, free_list, new_node = node.copy, node.free, node.flush_list, node.new
+local copy_node, free_node, free_list, new_node, node_type, node_id = node.copy, node.free, node.flush_list, node.new, node.type, node.id
+local tonumber, round = tonumber, math.round
+
+local glyph_node = node_id("glyph")
--[[ldx--
<p>The next function is not that much needed but in <l n='context'/> we use
@@ -2177,18 +2525,30 @@ for debugging <l n='luatex'/> node management.</p>
nodes = nodes or { }
+nodes.whatsits = { } -- table.swapped(node.whatsits())
+
local reserved = { }
+local whatsits = nodes.whatsits
+
+for k, v in next, node.whatsits() do
+ whatsits[k], whatsits[v] = v, k -- two way
+end
-function nodes.register(n)
+local function register_node(n)
reserved[#reserved+1] = n
return n
end
+nodes.register = register_node
+
function nodes.cleanup_reserved(nofboxes) -- todo
nodes.tracers.steppers.reset() -- todo: make a registration subsystem
local nr, nl = #reserved, 0
for i=1,nr do
- free_node(reserved[i])
+ local ri = reserved[i]
+ -- if not (ri.id == glue_spec and not ri.is_writable) then
+ free_node(reserved[i])
+ -- end
end
if nofboxes then
local tb = tex.box
@@ -2212,15 +2572,37 @@ function nodes.usage()
return t
end
-local disc = nodes.register(new_node("disc"))
-local kern = nodes.register(new_node("kern",1))
-local penalty = nodes.register(new_node("penalty"))
-local glue = nodes.register(new_node("glue"))
-local glue_spec = nodes.register(new_node("glue_spec"))
-local glyph = nodes.register(new_node("glyph",0))
-local textdir = nodes.register(new_node("whatsit",7))
-local rule = nodes.register(new_node("rule"))
-local latelua = nodes.register(new_node("whatsit",35))
+local disc = register_node(new_node("disc"))
+local kern = register_node(new_node("kern",1))
+local penalty = register_node(new_node("penalty"))
+local glue = register_node(new_node("glue")) -- glue.spec = nil
+local glue_spec = register_node(new_node("glue_spec"))
+local glyph = register_node(new_node("glyph",0))
+local textdir = register_node(new_node("whatsit",whatsits.dir)) -- 7 (6 is local par node)
+local rule = register_node(new_node("rule"))
+local latelua = register_node(new_node("whatsit",whatsits.late_lua)) -- 35
+local user_n = register_node(new_node("whatsit",whatsits.user_defined)) user_n.type = 100 -- 44
+local user_l = register_node(new_node("whatsit",whatsits.user_defined)) user_l.type = 110 -- 44
+local user_s = register_node(new_node("whatsit",whatsits.user_defined)) user_s.type = 115 -- 44
+local user_t = register_node(new_node("whatsit",whatsits.user_defined)) user_t.type = 116 -- 44
+local left_margin_kern = register_node(new_node("margin_kern",0))
+local right_margin_kern = register_node(new_node("margin_kern",1))
+local lineskip = register_node(new_node("glue",1))
+local baselineskip = register_node(new_node("glue",2))
+local leftskip = register_node(new_node("glue",8))
+local rightskip = register_node(new_node("glue",9))
+local temp = register_node(new_node("temp",0))
+
+function nodes.zeroglue(n)
+ local s = n.spec
+ return not writable or (
+ s.width == 0
+ and s.stretch == 0
+ and s.shrink == 0
+ and s.stretch_order == 0
+ and s.shrink_order == 0
+ )
+end
function nodes.glyph(fnt,chr)
local n = copy_node(glyph)
@@ -2228,48 +2610,195 @@ function nodes.glyph(fnt,chr)
if chr then n.char = chr end
return n
end
+
function nodes.penalty(p)
local n = copy_node(penalty)
n.penalty = p
return n
end
+
function nodes.kern(k)
local n = copy_node(kern)
n.kern = k
return n
end
-function nodes.glue(width,stretch,shrink)
- local n, s = copy_node(glue), copy_node(glue_spec)
- s.width, s.stretch, s.shrink = width, stretch, shrink
- n.spec = s
- return n
-end
+
function nodes.glue_spec(width,stretch,shrink)
local s = copy_node(glue_spec)
s.width, s.stretch, s.shrink = width, stretch, shrink
return s
end
+
+local function someskip(skip,width,stretch,shrink)
+ local n = copy_node(skip)
+ if not width then
+ -- no spec
+ elseif tonumber(width) then
+ local s = copy_node(glue_spec)
+ s.width, s.stretch, s.shrink = width, stretch, shrink
+ n.spec = s
+ else
+ -- shared
+ n.spec = copy_node(width)
+ end
+ return n
+end
+
+function nodes.glue(width,stretch,shrink)
+ return someskip(glue,width,stretch,shrink)
+end
+function nodes.leftskip(width,stretch,shrink)
+ return someskip(leftskip,width,stretch,shrink)
+end
+function nodes.rightskip(width,stretch,shrink)
+ return someskip(rightskip,width,stretch,shrink)
+end
+function nodes.lineskip(width,stretch,shrink)
+ return someskip(lineskip,width,stretch,shrink)
+end
+function nodes.baselineskip(width,stretch,shrink)
+ return someskip(baselineskip,width,stretch,shrink)
+end
+
function nodes.disc()
return copy_node(disc)
end
+
function nodes.textdir(dir)
local t = copy_node(textdir)
t.dir = dir
return t
end
-function nodes.rule(w,h,d)
+
+function nodes.rule(width,height,depth,dir)
local n = copy_node(rule)
- if w then n.width = w end
- if h then n.height = h end
- if d then n.depth = d end
+ if width then n.width = width end
+ if height then n.height = height end
+ if depth then n.depth = depth end
+ if dir then n.dir = dir end
return n
end
+
function nodes.latelua(code)
local n = copy_node(latelua)
n.data = code
return n
end
+function nodes.leftmarginkern(glyph,width)
+ local n = copy_node(left_margin_kern)
+ if not glyph then
+ logs.fatal("nodes","invalid pointer to left margin glyph node")
+ elseif glyph.id ~= glyph_node then
+ logs.fatal("nodes","invalid node type %s for left margin glyph node",node_type(glyph))
+ else
+ n.glyph = glyph
+ end
+ if width then
+ n.width = width
+ end
+ return n
+end
+
+function nodes.rightmarginkern(glyph,width)
+ local n = copy_node(right_margin_kern)
+ if not glyph then
+ logs.fatal("nodes","invalid pointer to right margin glyph node")
+ elseif glyph.id ~= glyph_node then
+ logs.fatal("nodes","invalid node type %s for right margin glyph node",node_type(p))
+ else
+ n.glyph = glyph
+ end
+ if width then
+ n.width = width
+ end
+ return n
+end
+
+function nodes.temp()
+ return copy_node(temp)
+end
+--[[
+<p>At some point we ran into a problem that the glue specification
+of the zeropoint dimension was overwritten when adapting a glue spec
+node. This is a side effect of glue specs being shared. After a
+couple of hours tracing and debugging Taco and I came to the
+conclusion that it made no sense to complicate the spec allocator
+and settled on a writable flag. This all is a side effect of the
+fact that some glues use reserved memory slots (with the zeropoint
+glue being a noticeable one). So, next we wrap this into a function
+and hide it for the user. And yes, LuaTeX now gives a warning as
+well.</p>
+]]--
+
+if tex.luatexversion > 51 then
+
+ function nodes.writable_spec(n)
+ local spec = n.spec
+ if not spec then
+ spec = copy_node(glue_spec)
+ n.spec = spec
+ elseif not spec.writable then
+ spec = copy_node(spec)
+ n.spec = spec
+ end
+ return spec
+ end
+
+else
+
+ function nodes.writable_spec(n)
+ local spec = n.spec
+ if not spec then
+ spec = copy_node(glue_spec)
+ else
+ spec = copy_node(spec)
+ end
+ n.spec = spec
+ return spec
+ end
+
+end
+
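-- An illustrative sketch (the glue node g comes from the caller): always fetch
-- a writable spec before touching its fields, so shared or reserved specs like
-- the zeropoint one mentioned above are never mutated.
--~ local function widen(g,amount)
--~     local spec = nodes.writable_spec(g)
--~     spec.width = spec.width + amount
--~ end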
+local cache = { }
+
+function nodes.usernumber(num)
+ local n = cache[num]
+ if n then
+ return copy_node(n)
+ else
+ local n = copy_node(user_n)
+ if num then n.value = num end
+ return n
+ end
+end
+
+function nodes.userlist(list)
+ local n = copy_node(user_l)
+ if list then n.value = list end
+ return n
+end
+
+local cache = { } -- we could use the same cache
+
+function nodes.userstring(str)
+ local n = cache[str]
+ if n then
+ return copy_node(n)
+ else
+ local n = copy_node(user_s)
+ n.type = 115
+ if str then n.value = str end
+ return n
+ end
+end
+
+function nodes.usertokens(tokens)
+ local n = copy_node(user_t)
+ if tokens then n.value = tokens end
+ return n
+end
+
statistics.register("cleaned up reserved nodes", function()
return format("%s nodes, %s lists of %s", nodes.cleanup_reserved(tex.count["lastallocatedbox"]))
end) -- \topofboxstack
@@ -2284,7 +2813,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['node-inj'] = {
version = 1.001,
- comment = "companion to node-ini.tex",
+ comment = "companion to node-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -2294,7 +2823,8 @@ if not modules then modules = { } end modules ['node-inj'] = {
-- This is very experimental (this will change when we have luatex > .50 and
-- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts.
+-- test fonts. Btw, future versions of luatex will have extended glyph properties
+-- that can be of help.
local next = next
@@ -2334,6 +2864,8 @@ local kerns = { }
-- explicitly i will provide an alternative; also, we can share
-- tables
+-- for the moment we pass the r2l key ... volt/arabtype tests
+
function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
@@ -2344,18 +2876,19 @@ function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
return dx, dy, bound
end
-function nodes.set_pair(current,factor,rlmode,spec,tfmchr)
+function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
local bound = has_attribute(current,kernpair)
if bound then
local kb = kerns[bound]
- kb[2], kb[3], kb[4], kb[5] = kb[2] + x, kb[3] + y, kb[4] + w, kb[5] + h
+ -- inefficient but singles have less, but weird anyway, needs checking
+ kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
else
bound = #kerns + 1
set_attribute(current,kernpair,bound)
- kerns[bound] = { rlmode, x, y, w, h }
+ kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
end
return x, y, w, h, bound
end
@@ -2455,6 +2988,7 @@ end
function nodes.inject_kerns(head,where,keep)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
+--~ if has_marks or has_cursives or has_kerns then
if trace_injections then
nodes.trace_injection(head)
end
@@ -2474,7 +3008,7 @@ function nodes.inject_kerns(head,where,keep)
if k then
local kk = kerns[k]
if kk then
- local x, y, w, h = kk[2], kk[3], kk[4], kk[5]
+ local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
local dy = y - h
if dy ~= 0 then
ky[n] = dy
@@ -2589,14 +3123,15 @@ function nodes.inject_kerns(head,where,keep)
local d = mrks[index]
if d then
-- local rlmode = d[3] -- not used
- -- if rlmode and rlmode < 0 then
- -- n.xoffset = p.xoffset + d[1]
+ -- if rlmode and rlmode > 0 then
+ -- todo
-- else
- n.xoffset = p.xoffset - d[1]
---~ local k = wx[p]
---~ if k then
---~ wx[n] = k
---~ end
+ local k = wx[p]
+ if k then
+ n.xoffset = p.xoffset - d[1] - k[2]
+ else
+ n.xoffset = p.xoffset - d[1]
+ end
-- end
if mk[p] then
n.yoffset = p.yoffset + d[2]
@@ -2618,23 +3153,41 @@ function nodes.inject_kerns(head,where,keep)
if next(wx) then
for n, k in next, wx do
-- only w can be nil, can be sped up when w == nil
- local rl, x, w = k[1], k[2] or 0, k[4] or 0
+ local rl, x, w, r2l = k[1], k[2] or 0, k[4] or 0, k[6]
local wx = w - x
- if rl < 0 then
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- -- if wx ~= 0 then
- -- insert_node_after(head,n,newkern(wx))
- -- end
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
+--~ if rl < 0 then
+--~ if r2l then
+--~ if wx ~= 0 then
+--~ insert_node_before(head,n,newkern(wx))
+--~ end
+--~ if x ~= 0 then
+--~ insert_node_after (head,n,newkern(x))
+--~ end
+--~ else
+--~ if x ~= 0 then
+--~ insert_node_before(head,n,newkern(x))
+--~ end
+--~ if wx ~= 0 then
+--~ insert_node_after(head,n,newkern(wx))
+--~ end
+--~ end
+--~ else
+ if r2l then
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
+ end
end
- end
+--~ end
end
end
if next(cx) then
@@ -2660,29 +3213,54 @@ function nodes.inject_kerns(head,where,keep)
if trace_injections then
nodes.trace_injection(head)
end
- -- we assume done is true because there are kerns
for n in traverse_id(glyph,head) do
local k = has_attribute(n,kernpair)
if k then
local kk = kerns[k]
if kk then
- -- only w can be nil, can be sped up when w == nil
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3] or 0, kk[4] or 0
- if y ~= 0 then
+ local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
+ if y and y ~= 0 then
n.yoffset = y -- todo: h ?
end
- local wx = w - x
- if rl < 0 then
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
+ if w then
+ -- copied from above
+ local r2l = kk[6]
+ local wx = w - x
+--~ if rl < 0 then
+--~ if r2l then
+--~ if x ~= 0 then
+--~ insert_node_before(head,n,newkern(x))
+--~ end
+--~ if wx ~= 0 then
+--~ insert_node_after(head,n,newkern(wx))
+--~ end
+--~ else
+--~ if wx ~= 0 then
+--~ insert_node_before(head,n,newkern(wx))
+--~ end
+--~ if x ~= 0 then
+--~ insert_node_after (head,n,newkern(x))
+--~ end
+--~ end
+--~ else
+ if r2l then
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+--~ end
else
- -- if wx ~= 0 then
- -- insert_node_after(head,n,newkern(wx))
- -- end
+ -- simple (e.g. kernclass kerns)
if x ~= 0 then
insert_node_before(head,n,newkern(x))
end
@@ -2706,7 +3284,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['node-fnt'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -2751,13 +3329,15 @@ if tex.attribute[0] < 0 then
end
+-- this will be redone and split in a generic one and a context one
+
function nodes.process_characters(head)
-- either next or not, but definitely no already processed list
starttiming(nodes)
local usedfonts, attrfonts, done = { }, { }, false
local a, u, prevfont, prevattr = 0, 0, nil, 0
for n in traverse_id(glyph,head) do
- local font, attr = n.font, has_attribute(n,0) -- zero attribute is reserved for fonts, preset to 0 is faster (first match)
+ local font, attr = n.font, has_attribute(n,0) -- zero attribute is reserved for fonts in context
if attr and attr > 0 then
if font ~= prevfont or attr ~= prevattr then
local used = attrfonts[font]
@@ -2813,7 +3393,7 @@ function nodes.process_characters(head)
head, done = h or head, done or d
if n > 1 then
for i=2,n do
- local h, d = processors[i](head,font,0) -- false)
+ local h, d = processors[i](head,font,false)
head, done = h or head, done or d
end
end
@@ -2825,7 +3405,7 @@ function nodes.process_characters(head)
head, done = h or head, done or d
if n > 1 then
for i=2,n do
- local h, d = processors[i](head,font,0) -- false)
+ local h, d = processors[i](head,font,false)
head, done = h or head, done or d
end
end
@@ -2884,7 +3464,6 @@ else do
-- X000 1100 = 12 = 0x1C = leftghost
-- X001 0100 = 20 = 0x14 = rightghost
-
function nodes.protect_glyphs(head)
local done = false
for g in traverse_id(glyph,head) do
@@ -2941,7 +3520,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-ini'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -2952,6 +3531,8 @@ if not modules then modules = { } end modules ['font-ini'] = {
--ldx]]--
local utf = unicode.utf8
+local format, serialize = string.format, table.serialize
+local write_nl = texio.write_nl
if not fontloader then fontloader = fontforge end
@@ -2961,7 +3542,11 @@ fontloader.totable = fontloader.to_table
-- fix comes last
fonts = fonts or { }
-fonts.ids = fonts.ids or { } -- aka fontdata
+
+fonts.ids = fonts.ids or { } fonts.identifiers = fonts.ids -- aka fontdata
+fonts.chr = fonts.chr or { } fonts.characters = fonts.chr -- aka chardata
+fonts.qua = fonts.qua or { } fonts.quads = fonts.qua -- aka quaddata
+
fonts.tfm = fonts.tfm or { }
fonts.mode = 'base'
@@ -2971,8 +3556,11 @@ fonts.verbose = false -- more verbose cache tables
fonts.ids[0] = { -- nullfont
characters = { },
descriptions = { },
+ name = "nullfont",
}
+fonts.chr[0] = { }
+
fonts.methods = fonts.methods or {
base = { tfm = { }, afm = { }, otf = { }, vtf = { }, fix = { } },
node = { tfm = { }, afm = { }, otf = { }, vtf = { }, fix = { } },
@@ -3002,38 +3590,27 @@ fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
-- tracing
-fonts.color = fonts.color or { }
-
-local attribute = attributes.private('color')
-local mapping = (attributes and attributes.list[attribute]) or { }
+if not fonts.color then
-local set_attribute = node.set_attribute
-local unset_attribute = node.unset_attribute
+ fonts.color = {
+ set = function() end,
+ reset = function() end,
+ }
-function fonts.color.set(n,c)
- local mc = mapping[c]
- if not mc then
- unset_attribute(n,attribute)
- else
- set_attribute(n,attribute,mc)
- end
-end
-function fonts.color.reset(n)
- unset_attribute(n,attribute)
end
--- this will change ...
+-- format identification
-function fonts.show_char_data(n)
- local tfmdata = fonts.ids[font.current()]
- if tfmdata then
- if type(n) == "string" then
- n = utf.byte(n)
- end
- local chr = tfmdata.characters[n]
- if chr then
- texio.write_nl(table.serialize(chr,string.format("U_%04X",n)))
- end
+fonts.formats = { }
+
+function fonts.fontformat(filename,default)
+ local extname = file.extname(filename)
+ local format = fonts.formats[extname]
+ if format then
+ return format
+ else
+ logs.report("fonts define","unable to detemine font format for '%s'",filename)
+ return default
end
end
@@ -3043,7 +3620,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-tfm'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -3079,6 +3656,7 @@ fonts.initializers = fonts.initializers or { }
fonts.initializers.common = fonts.initializers.common or { }
local fontdata = fonts.ids
+local disc = node.id('disc')
local glyph = node.id('glyph')
local set_attribute = node.set_attribute
@@ -3090,33 +3668,9 @@ supplied by <l n='luatex'/>.</p>
tfm.resolve_vf = true -- false
tfm.share_base_kerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
tfm.mathactions = { }
+tfm.fontname_mode = "fullpath"
-function tfm.enhance(tfmdata,specification)
- local name, size = specification.name, specification.size
- local encoding, filename = match(name,"^(.-)%-(.*)$") -- context: encoding-name.*
- if filename and encoding and fonts.enc.known[encoding] then
- local data = fonts.enc.load(encoding)
- if data then
- local characters = tfmdata.characters
- tfmdata.encoding = encoding
- local vector = data.vector
- local original = { }
- for k, v in next, characters do
- v.name = vector[k]
- v.index = k
- original[k] = v
- end
- for k,v in next, data.unicodes do
- if k ~= v then
- if trace_defining then
- logs.report("define font","mapping %s onto %s",k,v)
- end
- characters[k] = original[v]
- end
- end
- end
- end
-end
+tfm.enhance = tfm.enhance or function() end
function tfm.read_from_tfm(specification)
local fname, tfmdata = specification.filename or "", nil
@@ -3229,36 +3783,36 @@ fonts.trace_scaling = false
-- basekerns are scaled and will be hashed by table id
-- sharedkerns are unscaled and are hashed by concatenated indexes
-function tfm.check_base_kerns(tfmdata)
- if tfm.share_base_kerns then
- local sharedkerns = tfmdata.sharedkerns
- if sharedkerns then
- local basekerns = { }
- tfmdata.basekerns = basekerns
- return sharedkerns, basekerns
- end
- end
- return nil, nil
-end
+--~ function tfm.check_base_kerns(tfmdata)
+--~ if tfm.share_base_kerns then
+--~ local sharedkerns = tfmdata.sharedkerns
+--~ if sharedkerns then
+--~ local basekerns = { }
+--~ tfmdata.basekerns = basekerns
+--~ return sharedkerns, basekerns
+--~ end
+--~ end
+--~ return nil, nil
+--~ end
-function tfm.prepare_base_kerns(tfmdata)
- if tfm.share_base_kerns and not tfmdata.sharedkerns then
- local sharedkerns = { }
- tfmdata.sharedkerns = sharedkerns
- for u, chr in next, tfmdata.characters do
- local kerns = chr.kerns
- if kerns then
- local hash = concat(sortedkeys(kerns), " ")
- local base = sharedkerns[hash]
- if not base then
- sharedkerns[hash] = kerns
- else
- chr.kerns = base
- end
- end
- end
- end
-end
+--~ function tfm.prepare_base_kerns(tfmdata)
+--~ if tfm.share_base_kerns and not tfmdata.sharedkerns then
+--~ local sharedkerns = { }
+--~ tfmdata.sharedkerns = sharedkerns
+--~ for u, chr in next, tfmdata.characters do
+--~ local kerns = chr.kerns
+--~ if kerns then
+--~ local hash = concat(sortedkeys(kerns), " ")
+--~ local base = sharedkerns[hash]
+--~ if not base then
+--~ sharedkerns[hash] = kerns
+--~ else
+--~ chr.kerns = base
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
-- we can cache scaled characters when we are in node mode and don't have
-- protruding and expansion: hash == fullname @ size @ protruding @ expansion
@@ -3271,13 +3825,21 @@ local charactercache = { }
-- a virtual font has italic correction make sure to set the
-- has_italic flag. Some more flags will be added in the future.
-function tfm.do_scale(tfmtable, scaledpoints)
- tfm.prepare_base_kerns(tfmtable) -- optimalization
+function tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
if scaledpoints < 0 then
scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp
end
- local delta = scaledpoints/(tfmtable.units or 1000) -- brr, some open type fonts have 2048
- local t = { }
+ local units = tfmtable.units or 1000
+ local delta = scaledpoints/units -- brr, some open type fonts have 2048
+ return scaledpoints, delta, units
+end
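-- A worked example of the arithmetic above (made-up numbers): an OpenType
-- font with 2048 design units per em, asked for at 10pt (655360 scaled
-- points), scales each design unit by 655360/2048 = 320sp.
local fakefont = { units = 2048, designsize = 655360 }
local size, delta, units = tfm.calculate_scale(fakefont, 655360)
assert(size == 655360 and delta == 320 and units == 2048)
local size2 = tfm.calculate_scale(fakefont, -1200) -- negative: 1.2 times the designsize
-- size2 is now 1.2 * 655360 = 786432 scaled points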
+
+function tfm.do_scale(tfmtable, scaledpoints, relativeid)
+ -- tfm.prepare_base_kerns(tfmtable) -- optimization
+ local t = { } -- the new table
+ local scaledpoints, delta, units = tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
+ t.units_per_em = units or 1000
+ local hdelta, vdelta = delta, delta
-- unicoded unique descriptions shared cidinfo characters changed parameters indices
for k,v in next, tfmtable do
if type(v) == "table" then
@@ -3286,6 +3848,19 @@ function tfm.do_scale(tfmtable, scaledpoints)
t[k] = v
end
end
+ local extend_factor = tfmtable.extend_factor or 0
+ if extend_factor ~= 0 and extend_factor ~= 1 then
+ hdelta = hdelta * extend_factor
+ t.extend = extend_factor * 1000
+ else
+ t.extend = 1000
+ end
+ local slant_factor = tfmtable.slant_factor or 0
+ if slant_factor ~= 0 then
+ t.slant = slant_factor * 1000
+ else
+ t.slant = 0
+ end
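-- note (illustrative numbers): extend and slant are handed to the backend in
-- thousandths, so an extend_factor of 0.8 gives t.extend = 800 (and scales
-- hdelta accordingly) and a slant_factor of 0.2 gives t.slant = 200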
-- status
local isvirtual = tfmtable.type == "virtual" or tfmtable.virtualized
local hasmath = (tfmtable.math_parameters ~= nil and next(tfmtable.math_parameters) ~= nil) or (tfmtable.MathConstants ~= nil and next(tfmtable.MathConstants) ~= nil)
@@ -3301,6 +3876,9 @@ function tfm.do_scale(tfmtable, scaledpoints)
t.unicodes = tfmtable.unicodes
t.indices = tfmtable.indices
t.marks = tfmtable.marks
+t.goodies = tfmtable.goodies
+t.colorscheme = tfmtable.colorscheme
+--~ t.embedding = tfmtable.embedding
t.descriptions = descriptions
if tfmtable.fonts then
t.fonts = table.fastcopy(tfmtable.fonts) -- hm also at the end
@@ -3310,18 +3888,18 @@ function tfm.do_scale(tfmtable, scaledpoints)
local tfmp = tfmtable.parameters -- let's check for indexes
--
tp.slant = (tfmp.slant or tfmp[1] or 0)
- tp.space = (tfmp.space or tfmp[2] or 0)*delta
- tp.space_stretch = (tfmp.space_stretch or tfmp[3] or 0)*delta
- tp.space_shrink = (tfmp.space_shrink or tfmp[4] or 0)*delta
- tp.x_height = (tfmp.x_height or tfmp[5] or 0)*delta
- tp.quad = (tfmp.quad or tfmp[6] or 0)*delta
- tp.extra_space = (tfmp.extra_space or tfmp[7] or 0)*delta
+ tp.space = (tfmp.space or tfmp[2] or 0)*hdelta
+ tp.space_stretch = (tfmp.space_stretch or tfmp[3] or 0)*hdelta
+ tp.space_shrink = (tfmp.space_shrink or tfmp[4] or 0)*hdelta
+ tp.x_height = (tfmp.x_height or tfmp[5] or 0)*vdelta
+ tp.quad = (tfmp.quad or tfmp[6] or 0)*hdelta
+ tp.extra_space = (tfmp.extra_space or tfmp[7] or 0)*hdelta
local protrusionfactor = (tp.quad ~= 0 and 1000/tp.quad) or 0
local tc = t.characters
local characters = tfmtable.characters
local nameneeded = not tfmtable.shared.otfdata --hack
local changed = tfmtable.changed or { } -- for base mode
- local ischanged = not table.is_empty(changed)
+ local ischanged = changed and next(changed)
local indices = tfmtable.indices
local luatex = tfmtable.luatex
local tounicode = luatex and luatex.tounicode
@@ -3329,14 +3907,15 @@ function tfm.do_scale(tfmtable, scaledpoints)
local defaultheight = luatex and luatex.defaultheight or 0
local defaultdepth = luatex and luatex.defaultdepth or 0
-- experimental, sharing kerns (unscaled and scaled) saves memory
- local sharedkerns, basekerns = tfm.check_base_kerns(tfmtable)
+ -- local sharedkerns, basekerns = tfm.check_base_kerns(tfmtable)
-- loop over descriptions (afm and otf have descriptions, tfm not)
-- there is no need (yet) to assign a value to chr.tounicode
- local scaledwidth = defaultwidth * delta
- local scaledheight = defaultheight * delta
- local scaleddepth = defaultdepth * delta
+ local scaledwidth = defaultwidth * hdelta
+ local scaledheight = defaultheight * vdelta
+ local scaleddepth = defaultdepth * vdelta
local stackmath = tfmtable.ignore_stack_math ~= true
-local private = fonts.private
+ local private = fonts.private
+ local sharedkerns = { }
for k,v in next, characters do
local chr, description, index
if ischanged then
@@ -3357,9 +3936,9 @@ local private = fonts.private
local width = description.width
local height = description.height
local depth = description.depth
- if width then width = delta*width else width = scaledwidth end
- if height then height = delta*height else height = scaledheight end
- -- if depth then depth = delta*depth else depth = scaleddepth end
+ if width then width = hdelta*width else width = scaledwidth end
+ if height then height = vdelta*height else height = scaledheight end
+ -- if depth then depth = vdelta*depth else depth = scaleddepth end
if depth and depth ~= 0 then
depth = delta*depth
if nameneeded then
@@ -3405,6 +3984,7 @@ local private = fonts.private
end
end
if hasquality then
+ -- we could move these calculations elsewhere (saves calculations)
local ve = v.expansion_factor
if ve then
chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere
@@ -3422,7 +4002,7 @@ local private = fonts.private
if hasitalic then
local vi = description.italic or v.italic
if vi and vi ~= 0 then
- chr.italic = vi*delta
+ chr.italic = vi*hdelta
end
end
-- to be tested
@@ -3438,14 +4018,19 @@ local private = fonts.private
for i=1,#vv do
local vvi = vv[i]
t[i] = {
- ["start"] = (vvi["start"] or 0)*delta,
- ["end"] = (vvi["end"] or 0)*delta,
- ["advance"] = (vvi["advance"] or 0)*delta,
+ ["start"] = (vvi["start"] or 0)*vdelta,
+ ["end"] = (vvi["end"] or 0)*vdelta,
+ ["advance"] = (vvi["advance"] or 0)*vdelta,
["extender"] = vvi["extender"],
["glyph"] = vvi["glyph"],
}
end
chr.vert_variants = t
+ --~ local ic = v.vert_italic_correction
+ --~ if ic then
+ --~ chr.italic = ic * hdelta
+ --~ print(format("0x%05X -> %s",k,chr.italic))
+ --~ end
else
local hv = v.horiz_variants
if hv then
@@ -3453,9 +4038,9 @@ local private = fonts.private
for i=1,#hv do
local hvi = hv[i]
t[i] = {
- ["start"] = (hvi["start"] or 0)*delta,
- ["end"] = (hvi["end"] or 0)*delta,
- ["advance"] = (hvi["advance"] or 0)*delta,
+ ["start"] = (hvi["start"] or 0)*hdelta,
+ ["end"] = (hvi["end"] or 0)*hdelta,
+ ["advance"] = (hvi["advance"] or 0)*hdelta,
["extender"] = hvi["extender"],
["glyph"] = hvi["glyph"],
}
@@ -3466,31 +4051,23 @@ local private = fonts.private
end
local vt = description.top_accent
if vt then
- chr.top_accent = delta*vt
+ chr.top_accent = vdelta*vt
end
if stackmath then
local mk = v.mathkerns
if mk then
local kerns = { }
- -- for k, v in next, mk do
- -- local kk = { }
- -- for i=1,#v do
- -- local vi = v[i]
- -- kk[i] = { height = delta*vi.height, kern = delta*vi.kern }
- -- end
- -- kerns[k] = kk
- -- end
local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = delta*vi.height, kern = delta*vi.kern }
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
end kerns.top_right = k end
local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = delta*vi.height, kern = delta*vi.kern }
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
end kerns.top_left = k end
local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = delta*vi.height, kern = delta*vi.kern }
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
end kerns.bottom_left = k end
local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = delta*vi.height, kern = delta*vi.kern }
+ k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
end kerns.bottom_right = k end
chr.mathkern = kerns -- singular
end
@@ -3499,19 +4076,26 @@ local private = fonts.private
if not nodemode then
local vk = v.kerns
if vk then
- if sharedkerns then
- local base = basekerns[vk] -- hashed by table id, not content
- if not base then
- base = {}
- for k,v in next, vk do base[k] = v*delta end
- basekerns[vk] = base
- end
- chr.kerns = base
- else
- local tt = {}
- for k,v in next, vk do tt[k] = v*delta end
- chr.kerns = tt
+ --~ if sharedkerns then
+ --~ local base = basekerns[vk] -- hashed by table id, not content
+ --~ if not base then
+ --~ base = {}
+ --~ for k,v in next, vk do base[k] = v*hdelta end
+ --~ basekerns[vk] = base
+ --~ end
+ --~ chr.kerns = base
+ --~ else
+ --~ local tt = {}
+ --~ for k,v in next, vk do tt[k] = v*hdelta end
+ --~ chr.kerns = tt
+ --~ end
+ local s = sharedkerns[vk]
+ if not s then
+ s = { }
+ for k,v in next, vk do s[k] = v*hdelta end
+ sharedkerns[vk] = s
end
+ chr.kerns = s
end
local vl = v.ligatures
if vl then
@@ -3530,6 +4114,8 @@ local private = fonts.private
local vc = v.commands
if vc then
-- we assume non scaled commands here
+ -- tricky .. we need to scale pseudo math glyphs too
+ -- which is why we deal with rules too
local ok = false
for i=1,#vc do
local key = vc[i][1]
@@ -3543,8 +4129,12 @@ local private = fonts.private
for i=1,#vc do
local ivc = vc[i]
local key = ivc[1]
- if key == "right" or key == "down" then
- tt[#tt+1] = { key, ivc[2]*delta }
+ if key == "right" then
+ tt[#tt+1] = { key, ivc[2]*hdelta }
+ elseif key == "down" then
+ tt[#tt+1] = { key, ivc[2]*vdelta }
+ elseif key == "rule" then
+ tt[#tt+1] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
else -- not comment
tt[#tt+1] = ivc -- shared since in cache and untouched
end
@@ -3560,6 +4150,8 @@ local private = fonts.private
-- t.encodingbytes, t.filename, t.fullname, t.name: elsewhere
t.size = scaledpoints
t.factor = delta
+ t.hfactor = hdelta
+ t.vfactor = vdelta
if t.fonts then
t.fonts = table.fastcopy(t.fonts) -- maybe we virtualize more afterwards
end
@@ -3567,20 +4159,20 @@ local private = fonts.private
-- mathematics.extras.copy(t) -- can be done elsewhere if needed
local ma = tfm.mathactions
for i=1,#ma do
- ma[i](t,tfmtable,delta)
+ ma[i](t,tfmtable,delta,hdelta,vdelta) -- what delta?
end
end
-- needed for \high cum suis
local tpx = tp.x_height
-if hasmath then
- if not tp[13] then tp[13] = .86*tpx end -- mathsupdisplay
- if not tp[14] then tp[14] = .86*tpx end -- mathsupnormal
- if not tp[15] then tp[15] = .86*tpx end -- mathsupcramped
- if not tp[16] then tp[16] = .48*tpx end -- mathsubnormal
- if not tp[17] then tp[17] = .48*tpx end -- mathsubcombined
- if not tp[22] then tp[22] = 0 end -- mathaxisheight
- if t.MathConstants then t.MathConstants.AccentBaseHeight = nil end -- safeguard
-end
+ if hasmath then
+ if not tp[13] then tp[13] = .86*tpx end -- mathsupdisplay
+ if not tp[14] then tp[14] = .86*tpx end -- mathsupnormal
+ if not tp[15] then tp[15] = .86*tpx end -- mathsupcramped
+ if not tp[16] then tp[16] = .48*tpx end -- mathsubnormal
+ if not tp[17] then tp[17] = .48*tpx end -- mathsubcombined
+ if not tp[22] then tp[22] = 0 end -- mathaxisheight
+ if t.MathConstants then t.MathConstants.AccentBaseHeight = nil end -- safeguard
+ end
t.tounicode = 1
t.cidinfo = tfmtable.cidinfo
-- we have t.name=metricfile and t.fullname=RealName and t.filename=diskfilename
@@ -3588,18 +4180,23 @@ end
-- can have multiple subfonts
if hasmath then
if trace_defining then
- logs.report("define font","math enabled for: %s %s %s",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ logs.report("define font","math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
else
if trace_defining then
- logs.report("define font","math disabled for: %s %s %s",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ logs.report("define font","math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
t.nomath, t.MathConstants = true, nil
end
- -- fullname is used in the subsetting
if not t.psname then
- t.psname = t.fullname -- else bad luck
+ -- name used in pdf file as well as for selecting subfont in ttc/dfont
+ t.psname = t.fontname or (t.fullname and fonts.names.cleanname(t.fullname))
end
+ if trace_defining then
+ logs.report("define font","used for accesing subfont: '%s'",t.psname or "nopsname")
+ logs.report("define font","used for subsetting: '%s'",t.fontname or "nofontname")
+ end
+--~ print(t.fontname,table.serialize(t.MathConstants))
return t, delta
end
@@ -3619,21 +4216,19 @@ local lastfont = nil
--
-- flushing the kern and ligature tables from memory saves a lot (only
-- base mode) but it complicates vf building where the new characters
--- demand this data
-
---~ for id, f in pairs(fonts.ids) do -- or font.fonts
---~ local ffi = font.fonts[id]
---~ f.characters = ffi.characters
---~ f.kerns = ffi.kerns
---~ f.ligatures = ffi.ligatures
---~ end
+-- demand this data .. solution: functions that access them
function tfm.cleanup_table(tfmdata) -- we need a cleanup callback, now we miss the last one
if tfm.auto_cleanup then -- ok, we can hook this into everyshipout or so ... todo
if tfmdata.type == 'virtual' or tfmdata.virtualized then
for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
+ if v.commands then v.commands = nil end
+ -- if v.kerns then v.kerns = nil end
end
+ else
+ -- for k, v in next, tfmdata.characters do
+ -- if v.kerns then v.kerns = nil end
+ -- end
end
end
end
@@ -3641,8 +4236,8 @@ end
function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
end
-function tfm.scale(tfmtable, scaledpoints)
- local t, factor = tfm.do_scale(tfmtable, scaledpoints)
+function tfm.scale(tfmtable, scaledpoints, relativeid)
+ local t, factor = tfm.do_scale(tfmtable, scaledpoints, relativeid)
t.factor = factor
t.ascender = factor*(tfmtable.ascender or 0)
t.descender = factor*(tfmtable.descender or 0)
@@ -3676,7 +4271,8 @@ function fonts.analyzers.aux.setstate(head,font)
local descriptions = tfmdata.descriptions
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
while current do
- if current.id == glyph and current.font == font then
+ local id = current.id
+ if id == glyph and current.font == font then
local d = descriptions[current.char]
if d then
if d.class == "mark" then
@@ -3697,6 +4293,10 @@ function fonts.analyzers.aux.setstate(head,font)
end
first, last, n = nil, nil, 0
end
+ elseif id == disc then
+ -- always in the middle
+ set_attribute(current,state,2) -- midi
+ last = current
else -- finish
if first and first == last then
set_attribute(last,state,4) -- isol
@@ -3724,157 +4324,35 @@ function tfm.replacements(tfm,value)
-- tfm.characters[0x0060] = tfm.characters[0x2018]
end
--- auto complete font with missing composed characters
-
-table.insert(fonts.manipulators,"compose")
-
-function fonts.initializers.common.compose(tfmdata,value)
- if value then
- fonts.vf.aux.compose_characters(tfmdata)
- end
-end
+-- checking
--- tfm features, experimental
-
-tfm.features = tfm.features or { }
-tfm.features.list = tfm.features.list or { }
-tfm.features.default = tfm.features.default or { }
-
-function tfm.enhance(tfmdata,specification)
- -- we don't really share tfm data because we always reload
- -- but this is more in sycn with afm and such
- local features = (specification.features and specification.features.normal ) or { }
- tfmdata.shared = tfmdata.shared or { }
- tfmdata.shared.features = features
- -- tfmdata.shared.tfmdata = tfmdata -- circular
- tfmdata.filename = specification.name
- if not features.encoding then
- local name, size = specification.name, specification.size
- local encoding, filename = match(name,"^(.-)%-(.*)$") -- context: encoding-name.*
- if filename and encoding and fonts.enc.known[encoding] then
- features.encoding = encoding
- end
- end
- tfm.set_features(tfmdata)
-end
-
-function tfm.set_features(tfmdata)
- -- todo: no local functions
- local shared = tfmdata.shared
--- local tfmdata = shared.tfmdata
- local features = shared.features
- if not table.is_empty(features) then
- local mode = tfmdata.mode or fonts.mode
- local fi = fonts.initializers[mode]
- if fi and fi.tfm then
- local function initialize(list) -- using tex lig and kerning
- if list then
- for i=1,#list do
- local f = list[i]
- local value = features[f]
- if value and fi.tfm[f] then -- brr
- if tfm.trace_features then
- logs.report("define font","initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown',tfmdata.name or 'unknown')
- end
- fi.tfm[f](tfmdata,value)
- mode = tfmdata.mode or fonts.mode
- fi = fonts.initializers[mode]
- end
- end
+function tfm.checked_filename(metadata,whatever)
+ local foundfilename = metadata.foundfilename
+ if not foundfilename then
+ local askedfilename = metadata.filename or ""
+ if askedfilename ~= "" then
+ foundfilename = resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename == "" then
+ logs.report("fonts","source file '%s' is not found",askedfilename)
+ foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename ~= "" then
+ logs.report("fonts","using source file '%s' (cache mismatch)",foundfilename)
end
end
- initialize(fonts.triggers)
- initialize(tfm.features.list)
- initialize(fonts.manipulators)
+ elseif whatever then
+ logs.report("fonts","no source file for '%s'",whatever)
+ foundfilename = ""
end
- local fm = fonts.methods[mode]
- if fm and fm.tfm then
- local function register(list) -- node manipulations
- if list then
- for i=1,#list do
- local f = list[i]
- if features[f] and fm.tfm[f] then -- brr
- if not shared.processors then -- maybe also predefine
- shared.processors = { fm.tfm[f] }
- else
- shared.processors[#shared.processors+1] = fm.tfm[f]
- end
- end
- end
- end
- end
- register(tfm.features.list)
- end
- end
-end
-
-function tfm.features.register(name,default)
- tfm.features.list[#tfm.features.list+1] = name
- tfm.features.default[name] = default
-end
-
-function tfm.reencode(tfmdata,encoding)
- if encoding and fonts.enc.known[encoding] then
- local data = fonts.enc.load(encoding)
- if data then
- local characters, original, vector = tfmdata.characters, { }, data.vector
- tfmdata.encoding = encoding -- not needed
- for k, v in next, characters do
- v.name, v.index, original[k] = vector[k], k, v
- end
- for k,v in next, data.unicodes do
- if k ~= v then
- if trace_defining then
- logs.report("define font","reencoding U+%04X to U+%04X",k,v)
- end
- characters[k] = original[v]
- end
- end
- end
- end
-end
-
-tfm.features.register('reencode')
-
-fonts.initializers.base.tfm.reencode = tfm.reencode
-fonts.initializers.node.tfm.reencode = tfm.reencode
-
-fonts.enc = fonts.enc or { }
-fonts.enc.remappings = fonts.enc.remappings or { }
-
-function tfm.remap(tfmdata,remapping)
- local vector = remapping and fonts.enc.remappings[remapping]
- if vector then
- local characters, original = tfmdata.characters, { }
- for k, v in next, characters do
- original[k], characters[k] = v, nil
- end
- for k,v in next, vector do
- if k ~= v then
- if trace_defining then
- logs.report("define font","remapping U+%04X to U+%04X",k,v)
- end
- local c = original[k]
- characters[v] = c
- c.index = k
- end
- end
- tfmdata.encodingbytes = 2
- tfmdata.format = 'type1'
+ metadata.foundfilename = foundfilename
+ -- logs.report("fonts","using source file '%s'",foundfilename)
end
+ return foundfilename
end
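-- A usage sketch (the filename is made up); the resolved name is cached in
-- the metadata table, so later calls return it directly:
local metadata = { filename = "lmroman10-regular.otf" }
local found = tfm.checked_filename(metadata)
-- found == metadata.foundfilename; an empty string means nothing was resolved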
-tfm.features.register('remap')
-
-fonts.initializers.base.tfm.remap = tfm.remap
-fonts.initializers.node.tfm.remap = tfm.remap
-
-- status info
statistics.register("fonts load time", function()
- if statistics.elapsedindeed(fonts) then
- return format("%s seconds",statistics.elapsedtime(fonts))
- end
+ return statistics.elapsedseconds(fonts)
end)
end -- closure
@@ -3889,8 +4367,9 @@ if not modules then modules = { } end modules ['font-cid'] = {
license = "see context related readme files"
}
-local format, match = string.format, string.match
+local format, match, lower = string.format, string.match, string.lower
local tonumber = tonumber
+local lpegmatch = lpeg.match
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
@@ -3944,7 +4423,7 @@ function fonts.cid.load(filename)
local data = io.loaddata(filename)
if data then
unicodes, names = { }, { }
- grammar:match(data)
+ lpegmatch(grammar,data)
local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$")
return {
supplement = supplement,
@@ -3961,9 +4440,11 @@ end
local template = "%s-%s-%s.cidmap"
+
local function locate(registry,ordering,supplement)
- local filename = string.lower(format(template,registry,ordering,supplement))
- local cidmap = fonts.cid.map[filename]
+ local filename = format(template,registry,ordering,supplement)
+ local hashname = lower(filename)
+ local cidmap = fonts.cid.map[hashname]
if not cidmap then
if trace_loading then
logs.report("load otf","checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
@@ -3975,7 +4456,7 @@ local function locate(registry,ordering,supplement)
if trace_loading then
logs.report("load otf","using cidmap file %s",filename)
end
- fonts.cid.map[filename] = cidmap
+ fonts.cid.map[hashname] = cidmap
cidmap.usedname = file.basename(filename)
return cidmap
end
@@ -4119,7 +4600,7 @@ otf.tables.scripts = {
['ugar'] = 'Ugaritic Cuneiform',
['xpeo'] = 'Old Persian Cuneiform',
['xsux'] = 'Sumero-Akkadian Cuneiform',
- ['yi' ] = 'Yi'
+ ['yi' ] = 'Yi',
}
otf.tables.languages = {
@@ -4601,6 +5082,7 @@ otf.tables.features = {
['rphf'] = 'Reph Form',
['rtbd'] = 'Right Bounds',
['rtla'] = 'Right-To-Left Alternates',
+ ['rtlm'] = 'Right To Left Math', -- math
['ruby'] = 'Ruby Notation Forms',
['salt'] = 'Stylistic Alternates',
['sinf'] = 'Scientific Inferiors',
@@ -4703,16 +5185,24 @@ local to_scripts = otf.tables.to_scripts
local to_languages = otf.tables.to_languages
local to_features = otf.tables.to_features
-for k, v in pairs(to_features) do
+for k, v in next, to_features do
local stripped = gsub(k,"%-"," ")
to_features[stripped] = v
local stripped = gsub(k,"[^a-zA-Z0-9]","")
to_features[stripped] = v
end
-for k, v in pairs(to_features) do
+for k, v in next, to_features do
to_features[lower(k)] = v
end
+otf.meanings.checkers = {
+ rand = function(v)
+ return v and "random"
+ end
+}
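-- a small illustration of the checker mechanism used in normalize below:
-- otf.meanings.checkers.rand(true)  --> "random"
-- otf.meanings.checkers.rand(false) --> false, so normalize keeps the raw value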
+
+local checkers = otf.meanings.checkers
+
function otf.meanings.normalize(features)
local h = { }
for k,v in next, features do
@@ -4741,7 +5231,9 @@ function otf.meanings.normalize(features)
v = b
end
end
- h[to_features[k] or k] = v
+ k = to_features[k] or k
+ local c = checkers[k]
+ h[k] = c and c(v) or v
end
end
return h
@@ -4980,9 +5472,384 @@ end -- closure
do -- begin closure to overcome local limits and interference
+if not modules then modules = { } end modules ['font-map'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utf = unicode.utf8
+local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
+local lpegmatch = lpeg.match
+local utfbyte = utf.byte
+
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end)
+
+local ctxcatcodes = tex and tex.ctxcatcodes
+
+--[[ldx--
+<p>Eventually this code will disappear because map files are kind
+of obsolete. Some code may move to runtime or auxiliary modules.</p>
+<p>The name-to-unicode related code will of course stay.</p>
+--ldx]]--
+
+fonts = fonts or { }
+fonts.map = fonts.map or { }
+
+local function load_lum_table(filename) -- will move to font goodies
+ local lumname = file.replacesuffix(file.basename(filename),"lum")
+ local lumfile = resolvers.find_file(lumname,"map") or ""
+ if lumfile ~= "" and lfs.isfile(lumfile) then
+ if trace_loading or trace_unimapping then
+ logs.report("load otf","enhance: loading %s ",lumfile)
+ end
+ lumunic = dofile(lumfile)
+ return lumunic, lumfile
+ end
+end
+
+local hex = lpeg.R("AF","09")
+local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
+local hexsix = (hex^1) / function(s) return tonumber(s,16) end
+local dec = (lpeg.R("09")^1) / tonumber
+local period = lpeg.P(".")
+
+local unicode = lpeg.P("uni") * (hexfour * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexfour^1) * lpeg.Cc(true))
+local ucode = lpeg.P("u") * (hexsix * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexsix ^1) * lpeg.Cc(true))
+local index = lpeg.P("index") * dec * lpeg.Cc(false)
+
+local parser = unicode + ucode + index
+
+local parsers = { }
+
+local function make_name_parser(str)
+ if not str or str == "" then
+ return parser
+ else
+ local p = parsers[str]
+ if not p then
+ p = lpeg.P(str) * period * dec * lpeg.Cc(false)
+ parsers[str] = p
+ end
+ return p
+ end
+end
+
+--~ local parser = fonts.map.make_name_parser("Japan1")
+--~ local parser = fonts.map.make_name_parser()
+--~ local function test(str)
+--~ local b, a = lpegmatch(parser,str)
+--~ print((a and table.serialize(b)) or b)
+--~ end
+--~ test("a.sc")
+--~ test("a")
+--~ test("uni1234")
+--~ test("uni1234.xx")
+--~ test("uni12349876")
+--~ test("index1234")
+--~ test("Japan1.123")
+
+local function tounicode16(unicode)
+ if unicode < 0x10000 then
+ return format("%04X",unicode)
+ else
+ return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
+ end
+end
+
+local function tounicode16sequence(unicodes)
+ local t = { }
+ for l=1,#unicodes do
+ local unicode = unicodes[l]
+ if unicode < 0x10000 then
+ t[l] = format("%04X",unicode)
+ else
+ t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
+ end
+ end
+ return concat(t)
+end
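-- quick check of the helpers above (BMP code points, so the first branch):
assert(tounicode16(0x00E9) == "00E9")
assert(tounicode16sequence({ 0x0066, 0x0069 }) == "00660069") -- e.g. an f_i ligature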
+
+--~ This is quite a bit faster but at the cost of some memory. If we do
+--~ this we will also use it elsewhere, so let's not follow this route
+--~ now. I might use this method in the plain variant (no caching there)
+--~ but then I need a flag that distinguishes between code branches.
+--~
+--~ local cache = { }
+--~
+--~ function fonts.map.tounicode16(unicode)
+--~ local s = cache[unicode]
+--~ if not s then
+--~ if unicode < 0x10000 then
+--~ s = format("%04X",unicode)
+--~ else
+--~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
+--~ end
+--~ cache[unicode] = s
+--~ end
+--~ return s
+--~ end
+
+fonts.map.load_lum_table = load_lum_table
+fonts.map.make_name_parser = make_name_parser
+fonts.map.tounicode16 = tounicode16
+fonts.map.tounicode16sequence = tounicode16sequence
+
+local separator = lpeg.S("_.")
+local other = lpeg.C((1 - separator)^1)
+local ligsplitter = lpeg.Ct(other * (separator * other)^0)
+
+--~ print(table.serialize(lpegmatch(ligsplitter,"this")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
+
+fonts.map.add_to_unicode = function(data,filename)
+ local unicodes = data.luatex and data.luatex.unicodes
+ if not unicodes then
+ return
+ end
+ -- we need to move this code
+ unicodes['space'] = unicodes['space'] or 32
+ unicodes['hyphen'] = unicodes['hyphen'] or 45
+ unicodes['zwj'] = unicodes['zwj'] or 0x200D
+ unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
+ -- the tounicode mapping is sparse and only needed for alternatives
+ local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?"))
+ data.luatex.tounicode, data.luatex.originals = tounicode, originals
+ local lumunic, uparser, oparser
+ if false then -- will become an option
+ lumunic = load_lum_table(filename)
+ lumunic = lumunic and lumunic.tounicode
+ end
+ local cidinfo, cidnames, cidcodes = data.cidinfo
+ local usedmap = cidinfo and cidinfo.usedname
+ usedmap = usedmap and lower(usedmap)
+ usedmap = usedmap and fonts.cid.map[usedmap]
+ if usedmap then
+ oparser = usedmap and make_name_parser(cidinfo.ordering)
+ cidnames = usedmap.names
+ cidcodes = usedmap.unicodes
+ end
+ uparser = make_name_parser()
+ local aglmap = fonts.map and fonts.map.agl_to_unicode
+ for index, glyph in next, data.glyphs do
+ local name, unic = glyph.name, glyph.unicode or -1 -- play safe
+ if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
+ local unicode = (lumunic and lumunic[name]) or (aglmap and aglmap[name])
+ if unicode then
+ originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
+ end
+ -- cidmap heuristics, beware, there is no guarantee for a match unless
+ -- the chain resolves
+ if (not unicode) and usedmap then
+ local foundindex = lpegmatch(oparser,name)
+ if foundindex then
+ unicode = cidcodes[foundindex] -- name to number
+ if unicode then
+ originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
+ else
+ local reference = cidnames[foundindex] -- number to name
+ if reference then
+ local foundindex = lpegmatch(oparser,reference)
+ if foundindex then
+ unicode = cidcodes[foundindex]
+ if unicode then
+ originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
+ end
+ end
+ if not unicode then
+ local foundcodes, multiple = lpegmatch(uparser,reference)
+ if foundcodes then
+ if multiple then
+ originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
+ else
+ originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
+ if not unicode then
+ local split = lpegmatch(ligsplitter,name)
+ local nplit = (split and #split) or 0
+ if nplit == 0 then
+ -- skip
+ elseif nplit == 1 then
+ local base = split[1]
+ unicode = unicodes[base] or (aglmap and aglmap[base])
+ if unicode then
+ if type(unicode) == "table" then
+ unicode = unicode[1]
+ end
+ originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
+ end
+ else
+ local t = { }
+ for l=1,nplit do
+ local base = split[l]
+ local u = unicodes[base] or (aglmap and aglmap[base])
+ if not u then
+ break
+ elseif type(u) == "table" then
+ t[#t+1] = u[1]
+ else
+ t[#t+1] = u
+ end
+ end
+ if #t > 0 then -- done then
+ originals[index], tounicode[index], nl, unicode = t, tounicode16sequence(t), nl + 1, true
+ end
+ end
+ end
+ -- last resort
+ if not unicode then
+ local foundcodes, multiple = lpegmatch(uparser,name)
+ if foundcodes then
+ if multiple then
+ originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
+ else
+ originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
+ end
+ end
+ end
+ if not unicode then
+ originals[index], tounicode[index] = 0xFFFD, "FFFD"
+ end
+ end
+ end
+ if trace_unimapping then
+ for index, glyph in table.sortedhash(data.glyphs) do
+ local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe
+ if toun then
+ logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
+ else
+ logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns > 0 or nl > 0) then
+ logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
+ end
+end
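-- A minimal sketch of the data layout the mapper above expects (all names
-- and values are made up, not taken from a real font): luatex.unicodes maps
-- glyph names to code points, glyphs maps internal indices to records.
local data = {
    cidinfo = { },
    luatex  = { unicodes = { a = 0x61, b = 0x62 } },
    glyphs  = {
        [1] = { name = "a_b",     unicode = -1 }, -- component names, no direct unicode
        [2] = { name = "uni00E9", unicode = -1 }, -- code point encoded in the name
    },
}
fonts.map.add_to_unicode(data,"fakefont")
-- with the heuristics above this should give
--   data.luatex.tounicode[1] == "00610062"
--   data.luatex.originals[2] == 0x00E9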
+
+-- the following is sort of obsolete
+--
+-- fonts.map.data = fonts.map.data or { }
+-- fonts.map.encodings = fonts.map.encodings or { }
+-- fonts.map.loaded = fonts.map.loaded or { }
+-- fonts.map.line = fonts.map.line or { }
+--
+-- function fonts.map.line.pdftex(e)
+-- if e.name and e.fontfile then
+-- local fullname = e.fullname or ""
+-- if e.slant and e.slant ~= 0 then
+-- if e.encoding then
+-- pdf.mapline(format('= %s %s "%g SlantFont" <%s <%s',e.name,fullname,e.slant,e.encoding,e.fontfile)))
+-- else
+-- pdf.mapline(format('= %s %s "%g SlantFont" <%s',e.name,fullname,e.slant,e.fontfile)))
+-- end
+-- elseif e.extend and e.extend ~= 1 and e.extend ~= 0 then
+-- if e.encoding then
+-- pdf.mapline(format('= %s %s "%g ExtendFont" <%s <%s',e.name,fullname,e.extend,e.encoding,e.fontfile)))
+-- else
+-- pdf.mapline(format('= %s %s "%g ExtendFont" <%s',e.name,fullname,e.extend,e.fontfile)))
+-- end
+-- else
+-- if e.encoding then
+-- pdf.mapline(format('= %s %s <%s <%s',e.name,fullname,e.encoding,e.fontfile)))
+-- else
+-- pdf.mapline(format('= %s %s <%s',e.name,fullname,e.fontfile)))
+-- end
+-- end
+-- else
+-- return nil
+-- end
+-- end
+--
+-- function fonts.map.flush(backend) -- will also erase the accumulated data
+-- local flushline = fonts.map.line[backend or "pdftex"] or fonts.map.line.pdftex
+-- for _, e in next, fonts.map.data do
+-- flushline(e)
+-- end
+-- fonts.map.data = { }
+-- end
+--
+-- fonts.map.line.dvips = fonts.map.line.pdftex
+-- fonts.map.line.dvipdfmx = function() end
+--
+-- function fonts.map.convert_entries(filename)
+-- if not fonts.map.loaded[filename] then
+-- fonts.map.data, fonts.map.encodings = fonts.map.load_file(filename,fonts.map.data, fonts.map.encodings)
+-- fonts.map.loaded[filename] = true
+-- end
+-- end
+--
+-- function fonts.map.load_file(filename, entries, encodings)
+-- entries = entries or { }
+-- encodings = encodings or { }
+-- local f = io.open(filename)
+-- if f then
+-- local data = f:read("*a")
+-- if data then
+-- for line in gmatch(data,"(.-)[\n\t]") do
+-- if find(line,"^[%#%%%s]") then
+-- -- print(line)
+-- else
+-- local extend, slant, name, fullname, fontfile, encoding
+-- line = gsub(line,'"(.+)"', function(s)
+-- extend = find(s,'"([^"]+) ExtendFont"')
+-- slant = find(s,'"([^"]+) SlantFont"')
+-- return ""
+-- end)
+-- if not name then
+-- -- name fullname encoding fontfile
+-- name, fullname, encoding, fontfile = match(line,"^(%S+)%s+(%S*)[%s<]+(%S*)[%s<]+(%S*)%s*$")
+-- end
+-- if not name then
+-- -- name fullname (flag) fontfile encoding
+-- name, fullname, fontfile, encoding = match(line,"^(%S+)%s+(%S*)[%d%s<]+(%S*)[%s<]+(%S*)%s*$")
+-- end
+-- if not name then
+-- -- name fontfile
+-- name, fontfile = match(line,"^(%S+)%s+[%d%s<]+(%S*)%s*$")
+-- end
+-- if name then
+-- if encoding == "" then encoding = nil end
+-- entries[name] = {
+-- name = name, -- handy
+-- fullname = fullname,
+-- encoding = encoding,
+-- fontfile = fontfile,
+-- slant = tonumber(slant),
+-- extend = tonumber(extend)
+-- }
+-- encodings[name] = encoding
+-- elseif line ~= "" then
+-- -- print(line)
+-- end
+-- end
+-- end
+-- end
+-- f:close()
+-- end
+-- return entries, encodings
+-- end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
if not modules then modules = { } end modules ['font-otf'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -4993,6 +5860,8 @@ local utf = unicode.utf8
local concat, getn, utfbyte = table.concat, table.getn, utf.byte
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
+local abs = math.abs
+local lpegmatch = lpeg.match
local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
@@ -5000,13 +5869,10 @@ local trace_features = false trackers.register("otf.features", function(v
local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
local trace_sequences = false trackers.register("otf.sequences", function(v) trace_sequences = v end)
local trace_math = false trackers.register("otf.math", function(v) trace_math = v end)
-local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end)
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
--~ trackers.enable("otf.loading")
-local zwnj = 0x200C
-local zwj = 0x200D
-
--[[ldx--
<p>The fontforge table has organized lookups in a certain way. A first implementation
of this code was organized featurewise: information related to features was
@@ -5064,13 +5930,16 @@ otf.features.default = otf.features.default or { }
otf.enhancers = otf.enhancers or { }
otf.glists = { "gsub", "gpos" }
-otf.version = 2.628 -- beware: also sync font-mis.lua
+otf.version = 2.650 -- beware: also sync font-mis.lua
otf.pack = true -- beware: also sync font-mis.lua
otf.syncspace = true
otf.notdef = false
otf.cache = containers.define("fonts", "otf", otf.version, true)
otf.cleanup_aat = false -- only context
+local wildcard = "*"
+local default = "dflt"
+
--[[ldx--
<p>We start with a lot of tables and related functions.</p>
--ldx]]--
@@ -5086,6 +5955,7 @@ otf.tables.global_fields = table.tohash {
"names",
"unicodes",
"names",
+--~ "math",
"anchor_classes",
"kern_classes",
"gpos",
@@ -5158,7 +6028,7 @@ otf.tables.valid_fields = {
local function load_featurefile(ff,featurefile)
if featurefile then
- featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea')) -- "FONTFEATURES"
+ featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
if featurefile and featurefile ~= "" then
if trace_loading then
logs.report("load otf", "featurefile: %s", featurefile)
@@ -5172,7 +6042,7 @@ function otf.enhance(name,data,filename,verbose)
local enhancer = otf.enhancers[name]
if enhancer then
if (verbose ~= nil and verbose) or trace_loading then
- logs.report("load otf","enhance: %s",name)
+ logs.report("load otf","enhance: %s (%s)",name,filename)
end
enhancer(data,filename)
end
@@ -5184,8 +6054,10 @@ local enhancers = {
"patch bugs",
"merge cid fonts", "prepare unicode", "cleanup ttf tables", "compact glyphs", "reverse coverage",
"cleanup aat", "enrich with features", "add some missing characters",
+ "reorganize mark classes",
"reorganize kerns", -- moved here
"flatten glyph lookups", "flatten anchor tables", "flatten feature tables",
+ "simplify glyph lookups", -- some saving
"prepare luatex tables",
"analyse features", "rehash features",
"analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables",
@@ -5193,6 +6065,7 @@ local enhancers = {
"share widths",
"strip not needed data",
"migrate metadata",
+ "check math parameters",
}
function otf.load(filename,format,sub,featurefile)
@@ -5209,7 +6082,7 @@ function otf.load(filename,format,sub,featurefile)
local data = containers.read(otf.cache(), hash)
local size = lfs.attributes(filename,"size") or 0
if not data or data.verbose ~= fonts.verbose or data.size ~= size then
- logs.report("load otf","loading: %s",filename)
+ logs.report("load otf","loading: %s (hash: %s)",filename,hash)
local ff, messages
if sub then
ff, messages = fontloader.open(filename,sub)
@@ -5217,9 +6090,15 @@ function otf.load(filename,format,sub,featurefile)
ff, messages = fontloader.open(filename)
end
if trace_loading and messages and #messages > 0 then
- for m=1,#messages do
- logs.report("load otf","warning: %s",messages[m])
+ if type(messages) == "string" then
+ logs.report("load otf","warning: %s",messages)
+ else
+ for m=1,#messages do
+ logs.report("load otf","warning: %s",tostring(messages[m]))
+ end
end
+ else
+ logs.report("load otf","font loaded okay")
end
if ff then
load_featurefile(ff,featurefile)
@@ -5230,6 +6109,7 @@ function otf.load(filename,format,sub,featurefile)
logs.report("load otf","enhancing ...")
for e=1,#enhancers do
otf.enhance(enhancers[e],data,filename)
+ io.flush() -- we want instant messages
end
if otf.pack and not fonts.verbose then
otf.enhance("pack",data,filename)
@@ -5249,6 +6129,9 @@ function otf.load(filename,format,sub,featurefile)
end
end
if data then
+ if trace_defining then
+ logs.report("define font","loading from cache: %s",hash)
+ end
otf.enhance("unpack",data,filename,false) -- no message here
otf.add_dimensions(data)
if trace_sequences then
@@ -5336,6 +6219,29 @@ end
-- todo: normalize, design_size => designsize
+otf.enhancers["reorganize mark classes"] = function(data,filename)
+ if data.mark_classes then
+ local unicodes = data.luatex.unicodes
+ local reverse = { }
+ for name, class in next, data.mark_classes do
+ local t = { }
+ for s in gmatch(class,"[^ ]+") do
+ local us = unicodes[s]
+ if type(us) == "table" then
+ for u=1,#us do
+ t[us[u]] = true
+ end
+ else
+ t[us] = true
+ end
+ end
+ reverse[name] = t
+ end
+ data.luatex.markclasses = reverse
+ data.mark_classes = nil
+ end
+end
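-- for illustration (made-up names): with
--   data.mark_classes    = { topmarks = "acutecomb gravecomb" }
--   data.luatex.unicodes = { acutecomb = 0x0301, gravecomb = 0x0300 }
-- the enhancer above ends up with
--   data.luatex.markclasses.topmarks = { [0x0301] = true, [0x0300] = true }
-- and drops the original mark_classes table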
+
otf.enhancers["prepare luatex tables"] = function(data,filename)
data.luatex = data.luatex or { }
local luatex = data.luatex
@@ -5447,140 +6353,7 @@ otf.enhancers["analyse marks"] = function(data,filename)
end
end
-local separator = lpeg.S("_.")
-local other = lpeg.C((1 - separator)^1)
-local ligsplitter = lpeg.Ct(other * (separator * other)^0)
-
---~ print(table.serialize(ligsplitter:match("this")))
---~ print(table.serialize(ligsplitter:match("this.that")))
---~ print(table.serialize(ligsplitter:match("japan1.123")))
---~ print(table.serialize(ligsplitter:match("such_so_more")))
---~ print(table.serialize(ligsplitter:match("such_so_more.that")))
-
-otf.enhancers["analyse unicodes"] = function(data,filename)
- local tounicode16, tounicode16sequence = fonts.map.tounicode16, fonts.map.tounicode16sequence
- local unicodes = data.luatex.unicodes
- -- we need to move this code
- unicodes['space'] = unicodes['space'] or 32 -- handly later on
- unicodes['hyphen'] = unicodes['hyphen'] or 45 -- handly later on
- unicodes['zwj'] = unicodes['zwj'] or zwj -- handly later on
- unicodes['zwnj'] = unicodes['zwnj'] or zwnj -- handly later on
- -- the tounicode mapping is sparse and only needed for alternatives
- local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?"))
- data.luatex.tounicode, data.luatex.originals = tounicode, originals
- local lumunic, uparser, oparser
- if false then -- will become an option
- lumunic = fonts.map.load_lum_table(filename)
- lumunic = lumunic and lumunic.tounicode
- end
- local cidinfo, cidnames, cidcodes = data.cidinfo
- local usedmap = cidinfo and cidinfo.usedname
- usedmap = usedmap and fonts.cid.map[usedmap]
- if usedmap then
- oparser = usedmap and fonts.map.make_name_parser(cidinfo.ordering)
- cidnames = usedmap.names
- cidcodes = usedmap.unicodes
- end
- uparser = fonts.map.make_name_parser()
- for index, glyph in next, data.glyphs do
- local name, unic = glyph.name, glyph.unicode or -1 -- play safe
- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
- local unicode = lumunic and lumunic[name]
- if unicode then
- originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
- end
- -- cidmap heuristics, beware, there is no guarantee for a match unless
- -- the chain resolves
- if not unicode and usedmap then
- local foundindex = oparser:match(name)
- if foundindex then
- unicode = cidcodes[foundindex] -- name to number
- if not unicode then
- local reference = cidnames[foundindex] -- number to name
- if reference then
- local foundindex = oparser:match(reference)
- if foundindex then
- unicode = cidcodes[foundindex]
- if unicode then
- originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
- end
- end
- if not unicode then
- local foundcodes, multiple = uparser:match(reference)
- if foundcodes then
- if multiple then
- originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
- else
- originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
- end
- end
- end
- end
- end
- end
- end
- -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
- if not unicode then
- local split = ligsplitter:match(name)
- local nplit = (split and #split) or 0
- if nplit == 0 then
- -- skip
- elseif nplit == 1 then
- unicode = unicodes[split[1]]
- if unicode then
- if type(unicode) == "table" then
- unicode = unicode[1]
- end
- originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
- end
- else
- local done = true
- for l=1,nplit do
- local u = unicodes[split[l]]
- if not u then
- done = false
- break
- elseif type(u) == "table" then
- split[l] = u[1]
- else
- split[l] = u
- end
- end
- if done then
- originals[index], tounicode[index], nl, unicode = split, tounicode16sequence(split), nl + 1, true
- end
- end
- end
- -- last resort
- if not unicode then
- local foundcodes, multiple = uparser:match(name)
- if foundcodes then
- if multiple then
- originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
- else
- originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
- end
- end
- end
- if not unicode then
- originals[index], tounicode[index] = 0xFFFD, "FFFD"
- end
- end
- end
- if trace_unimapping then
- for index, glyph in table.sortedpairs(data.glyphs) do
- local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe
- if toun then
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
- else
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
- end
- end
- end
- if trace_loading and (ns > 0 or nl > 0) then
- logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
- end
-end
+otf.enhancers["analyse unicodes"] = fonts.map.add_to_unicode
otf.enhancers["analyse subtables"] = function(data,filename)
data.luatex = data.luatex or { }
@@ -5640,8 +6413,11 @@ otf.enhancers["analyse subtables"] = function(data,filename)
(flags.ignorecombiningmarks and "mark") or false,
(flags.ignoreligatures and "ligature") or false,
(flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false
+ flags.r2l or false,
}
+ if flags.mark_class then
+ gk.markclass = luatex.markclasses[flags.mark_class]
+ end
end
end
end
@@ -5650,7 +6426,10 @@ end
otf.enhancers["merge cid fonts"] = function(data,filename)
-- we can also move the names to data.luatex.names which might
-- save us some more memory (at the cost of harder tracing)
- if data.subfonts and table.is_empty(data.glyphs) then
+ if data.subfonts then
+ if data.glyphs and next(data.glyphs) then
+ logs.report("load otf","replacing existing glyph table due to subfonts")
+ end
local cidinfo = data.cidinfo
local verbose = fonts.verbose
if cidinfo.registry then
@@ -5738,7 +6517,7 @@ otf.enhancers["prepare unicode"] = function(data,filename)
end
end
end
- -- beware: the indeces table is used to initialize the tfm table
+ -- beware: the indices table is used to initialize the tfm table
for unicode, index in next, mapmap do
if not internals[index] then
local name = glyphs[index].name
@@ -5934,6 +6713,129 @@ end
-- kern: ttf has a table with kerns
+--~ otf.enhancers["reorganize kerns"] = function(data,filename)
+--~ local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
+--~ local mkdone = false
+--~ for index, glyph in next, data.glyphs do
+--~ if glyph.kerns then
+--~ local mykerns = { }
+--~ for k,v in next, glyph.kerns do
+--~ local vc, vo, vl = v.char, v.off, v.lookup
+--~ if vc and vo and vl then -- brrr, wrong! we miss the non unicode ones
+--~ local uvc = unicodes[vc]
+--~ if not uvc then
+--~ if trace_loading then
+--~ logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
+--~ end
+--~ else
+--~ if type(vl) ~= "table" then
+--~ vl = { vl }
+--~ end
+--~ for l=1,#vl do
+--~ local vll = vl[l]
+--~ local mkl = mykerns[vll]
+--~ if not mkl then
+--~ mkl = { }
+--~ mykerns[vll] = mkl
+--~ end
+--~ if type(uvc) == "table" then
+--~ for u=1,#uvc do
+--~ mkl[uvc[u]] = vo
+--~ end
+--~ else
+--~ mkl[uvc] = vo
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ glyph.mykerns = mykerns
+--~ glyph.kerns = nil -- saves space and time
+--~ mkdone = true
+--~ end
+--~ end
+--~ if trace_loading and mkdone then
+--~ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+--~ end
+--~ if data.kerns then
+--~ if trace_loading then
+--~ logs.report("load otf", "removing global 'kern' table")
+--~ end
+--~ data.kerns = nil
+--~ end
+--~ local dgpos = data.gpos
+--~ if dgpos then
+--~ for gp=1,#dgpos do
+--~ local gpos = dgpos[gp]
+--~ local subtables = gpos.subtables
+--~ if subtables then
+--~ for s=1,#subtables do
+--~ local subtable = subtables[s]
+--~ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
+--~ if kernclass then -- the next one is quite slow
+--~ for k=1,#kernclass do
+--~ local kcl = kernclass[k]
+--~ local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
+--~ if type(lookups) ~= "table" then
+--~ lookups = { lookups }
+--~ end
+--~ for l=1,#lookups do
+--~ local lookup = lookups[l]
+--~ -- weird, as maxfirst and maxseconds can have holes
+--~ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+--~ if trace_loading then
+--~ logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
+--~ end
+--~ for fk, fv in next, firsts do
+--~ for first in gmatch(fv,"[^ ]+") do
+--~ local first_unicode = unicodes[first]
+--~ if type(first_unicode) == "number" then
+--~ first_unicode = { first_unicode }
+--~ end
+--~ for f=1,#first_unicode do
+--~ local glyph = glyphs[mapmap[first_unicode[f]]]
+--~ if glyph then
+--~ local mykerns = glyph.mykerns
+--~ if not mykerns then
+--~ mykerns = { } -- unicode indexed !
+--~ glyph.mykerns = mykerns
+--~ end
+--~ local lookupkerns = mykerns[lookup]
+--~ if not lookupkerns then
+--~ lookupkerns = { }
+--~ mykerns[lookup] = lookupkerns
+--~ end
+--~ for sk, sv in next, seconds do
+--~ local offset = offsets[(fk-1) * maxseconds + sk]
+--~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
+--~ for second in gmatch(sv,"[^ ]+") do
+--~ local second_unicode = unicodes[second]
+--~ if type(second_unicode) == "number" then
+--~ lookupkerns[second_unicode] = offset
+--~ else
+--~ for s=1,#second_unicode do
+--~ lookupkerns[second_unicode[s]] = offset
+--~ end
+--~ end
+--~ end
+--~ end
+--~ elseif trace_loading then
+--~ logs.report("load otf", "no glyph data for U+%04X", first_unicode[f])
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables."
+--~ subtable.kernclass = { }
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+
otf.enhancers["reorganize kerns"] = function(data,filename)
local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
local mkdone = false
@@ -5986,6 +6888,9 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
local dgpos = data.gpos
if dgpos then
+ local separator = lpeg.P(" ")
+ local other = ((1 - separator)^0) / unicodes
+ local splitter = lpeg.Ct(other * (separator * other)^0)
for gp=1,#dgpos do
local gpos = dgpos[gp]
local subtables = gpos.subtables
@@ -5993,54 +6898,75 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
for s=1,#subtables do
local subtable = subtables[s]
local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- if kernclass then
+ if kernclass then -- the next one is quite slow
for k=1,#kernclass do
local kcl = kernclass[k]
local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
if type(lookups) ~= "table" then
lookups = { lookups }
end
+ local split = { }
for l=1,#lookups do
local lookup = lookups[l]
+ -- weird, as maxfirsts and maxseconds can have holes; firsts seems to be indexed, seconds starts at 2
local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+ for _, s in next, firsts do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ for _, s in next, seconds do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
if trace_loading then
logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
end
- for fk, fv in next, firsts do
- for first in gmatch(fv,"[^ ]+") do
- local first_unicode = unicodes[first]
- if type(first_unicode) == "number" then
- first_unicode = { first_unicode }
+ local function do_it(fk,first_unicode)
+ local glyph = glyphs[mapmap[first_unicode]]
+ if glyph then
+ local mykerns = glyph.mykerns
+ if not mykerns then
+ mykerns = { } -- unicode indexed !
+ glyph.mykerns = mykerns
end
- for f=1,#first_unicode do
- local glyph = glyphs[mapmap[first_unicode[f]]]
- if glyph then
- local mykerns = glyph.mykerns
- if not mykerns then
- mykerns = { } -- unicode indexed !
- glyph.mykerns = mykerns
- end
- local lookupkerns = mykerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- mykerns[lookup] = lookupkerns
- end
- for sk, sv in next, seconds do
- local offset = offsets[(fk-1) * maxseconds + sk]
- --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
- for second in gmatch(sv,"[^ ]+") do
- local second_unicode = unicodes[second]
- if type(second_unicode) == "number" then
- lookupkerns[second_unicode] = offset
- else
- for s=1,#second_unicode do
- lookupkerns[second_unicode[s]] = offset
- end
+ local lookupkerns = mykerns[lookup]
+ if not lookupkerns then
+ lookupkerns = { }
+ mykerns[lookup] = lookupkerns
+ end
+ local baseoffset = (fk-1) * maxseconds
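                        -- Editorial note (not from the upstream sources): 'offsets' is a flat
                        -- maxfirsts x maxseconds matrix stored row by row, so baseoffset + sk
                        -- addresses the kern value for first class fk against second class sk.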
+ for sk=2,maxseconds do
+ local sv = seconds[sk]
+ local offset = offsets[baseoffset + sk]
+ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
+ local splt = split[sv]
+ if splt then
+ for i=1,#splt do
+ local second_unicode = splt[i]
+ if tonumber(second_unicode) then
+ lookupkerns[second_unicode] = offset
+ else
+ for s=1,#second_unicode do
+ lookupkerns[second_unicode[s]] = offset
end
end
end
- elseif trace_loading then
- logs.report("load otf", "no glyph data for U+%04X", first_unicode[f])
+ end
+ end
+ elseif trace_loading then
+ logs.report("load otf", "no glyph data for U+%04X", first_unicode)
+ end
+ end
+ for fk=1,#firsts do
+ local fv = firsts[fk]
+ local splt = split[fv]
+ if splt then
+ for i=1,#splt do
+ local first_unicode = splt[i]
+ if tonumber(first_unicode) then
+ do_it(fk,first_unicode)
+ else
+ for f=1,#first_unicode do
+ do_it(fk,first_unicode[f])
+ end
end
end
end
@@ -6114,10 +7040,31 @@ otf.enhancers["migrate metadata"] = function(data,filename)
metadata.charwidth = pfminfo and pfminfo.avgwidth
end
+local private_math_parameters = {
+ "FractionDelimiterSize",
+ "FractionDelimiterDisplayStyleSize",
+}
+
+otf.enhancers["check math parameters"] = function(data,filename)
+ local mathdata = data.metadata.math
+ if mathdata then
+ for m=1,#private_math_parameters do
+ local pmp = private_math_parameters[m]
+ if not mathdata[pmp] then
+ if trace_loading then
+ logs.report("load otf", "setting math parameter '%s' to 0", pmp)
+ end
+ mathdata[pmp] = 0
+ end
+ end
+ end
+end
+
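-- A minimal sketch (editorial, not from the upstream sources) of the enhancer hook
-- pattern used above: an enhancer receives the raw font table plus the filename and
-- patches the table in place. The key and default below are hypothetical and only
-- illustrate the shape of such a hook:
--
--   otf.enhancers["check demo parameter"] = function(data,filename)
--       local mathdata = data.metadata and data.metadata.math
--       if mathdata and not mathdata.DemoParameter then -- hypothetical field, for illustration only
--           mathdata.DemoParameter = 0
--       end
--   end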
otf.enhancers["flatten glyph lookups"] = function(data,filename)
for k, v in next, data.glyphs do
- if v.lookups then
- for kk, vv in next, v.lookups do
+ local lookups = v.lookups
+ if lookups then
+ for kk, vv in next, lookups do
for kkk=1,#vv do
local vvv = vv[kkk]
local s = vvv.specification
@@ -6167,6 +7114,31 @@ otf.enhancers["flatten glyph lookups"] = function(data,filename)
end
end
+otf.enhancers["simplify glyph lookups"] = function(data,filename)
+ for k, v in next, data.glyphs do
+ local lookups = v.lookups
+ if lookups then
+ local slookups, mlookups
+ for kk, vv in next, lookups do
+ if #vv == 1 then
+ if not slookups then
+ slookups = { }
+ v.slookups = slookups
+ end
+ slookups[kk] = vv[1]
+ else
+ if not mlookups then
+ mlookups = { }
+ v.mlookups = mlookups
+ end
+ mlookups[kk] = vv
+ end
+ end
+ v.lookups = nil
+ end
+ end
+end
+
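-- Editorial note (not from the upstream sources): after this enhancer a glyph that had,
-- say, lookups = { ["ls_l_1"] = { spec }, ["ls_l_2"] = { spec1, spec2 } } ends up with
-- slookups = { ["ls_l_1"] = spec } and mlookups = { ["ls_l_2"] = { spec1, spec2 } },
-- and the original lookups table is dropped; the base mode preparation code below
-- walks slookups and mlookups separately.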
otf.enhancers["flatten anchor tables"] = function(data,filename)
for k, v in next, data.glyphs do
if v.anchors then
@@ -6234,10 +7206,12 @@ function otf.features.register(name,default)
otf.features.default[name] = default
end
+-- for context this will become a task handler
+
function otf.set_features(tfmdata,features)
local processes = { }
- if not table.is_empty(features) then
- local lists = {
+ if features and next(features) then
+ local lists = { -- why local
fonts.triggers,
fonts.processors,
fonts.manipulators,
@@ -6274,7 +7248,7 @@ function otf.set_features(tfmdata,features)
end
end
end
- local fm = fonts.methods[mode]
+        local fm = fonts.methods[mode] -- todo: without node/mode otf/...
if fm then
local fmotf = fm.otf
if fmotf then
@@ -6311,14 +7285,14 @@ function otf.otf_to_tfm(specification)
--~ print(cache_id)
if not tfmdata then
local otfdata = otf.load(filename,format,sub,features and features.featurefile)
- if not table.is_empty(otfdata) then
+ if otfdata and next(otfdata) then
otfdata.shared = otfdata.shared or {
featuredata = { },
anchorhash = { },
initialized = false,
}
tfmdata = otf.copy_to_tfm(otfdata,cache_id)
- if not table.is_empty(tfmdata) then
+ if tfmdata and next(tfmdata) then
tfmdata.unique = tfmdata.unique or { }
tfmdata.shared = tfmdata.shared or { } -- combine
local shared = tfmdata.shared
@@ -6371,6 +7345,11 @@ end
-- we cannot share descriptions as virtual fonts might extend them (ok, we could
 -- use a cache with a hash)
+fonts.formats.dfont = "truetype"
+fonts.formats.ttc = "truetype"
+fonts.formats.ttf = "truetype"
+fonts.formats.otf = "opentype"
+
function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder the tma to unicode (nasty due to one->many)
if data then
local glyphs, pfminfo, metadata = data.glyphs or { }, data.pfminfo or { }, data.metadata or { }
@@ -6378,14 +7357,11 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
local unicodes = luatex.unicodes -- names to unicodes
local indices = luatex.indices
local characters, parameters, math_parameters, descriptions = { }, { }, { }, { }
- local tfm = {
- characters = characters,
- parameters = parameters,
- math_parameters = math_parameters,
- descriptions = descriptions,
- indices = indices,
- unicodes = unicodes,
- }
+ local designsize = metadata.designsize or metadata.design_size or 100
+ if designsize == 0 then
+ designsize = 100
+ end
+ local spaceunits = 500
-- indices maps from unicodes to indices
for u, i in next, indices do
characters[u] = { } -- we need this because for instance we add protruding info and loop over characters
@@ -6407,7 +7383,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
local variants = m.horiz_variants
if variants then
local c = char
- for n in variants:gmatch("[^ ]+") do
+ for n in gmatch(variants,"[^ ]+") do
local un = unicodes[n]
if un and u ~= un then
c.next = un
@@ -6419,7 +7395,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
local variants = m.vert_variants
if variants then
local c = char
- for n in variants:gmatch("[^ ]+") do
+ for n in gmatch(variants,"[^ ]+") do
local un = unicodes[n]
if un and u ~= un then
c.next = un
@@ -6427,6 +7403,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
end
end
c.vert_variants = m.vert_parts
+ c.vert_italic_correction = m.vert_italic_correction
end
end
local kerns = m.kerns
@@ -6437,64 +7414,49 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
end
end
-- end math
- local designsize = metadata.designsize or metadata.design_size or 100
- if designsize == 0 then
- designsize = 100
- end
- local spaceunits = 500
- tfm.units = metadata.units_per_em or 1000
- -- we need a runtime lookup because of running from cdrom or zip, brrr
- tfm.filename = resolvers.findbinfile(luatex.filename,"") or luatex.filename
- tfm.fullname = metadata.fontname or metadata.fullname
- tfm.psname = tfm.fullname
- tfm.encodingbytes = 2
- tfm.cidinfo = data.cidinfo
- tfm.cidinfo.registry = tfm.cidinfo.registry or ""
- tfm.type = "real"
- tfm.stretch = 0 -- stretch
- tfm.slant = 0 -- slant
- tfm.direction = 0
- tfm.boundarychar_label = 0
- tfm.boundarychar = 65536
- tfm.designsize = (designsize/10)*65536
- tfm.spacer = "500 units"
- local endash, emdash = 0x20, 0x2014 -- unicodes['space'], unicodes['emdash']
+    local endash, emdash, space, spacer = 0x20, 0x2014, "space", "500 units" -- unicodes['space'], unicodes['emdash']
if metadata.isfixedpitch then
if descriptions[endash] then
- spaceunits, tfm.spacer = descriptions[endash].width, "space"
+ spaceunits, spacer = descriptions[endash].width, "space"
end
if not spaceunits and descriptions[emdash] then
- spaceunits, tfm.spacer = descriptions[emdash].width, "emdash"
+ spaceunits, spacer = descriptions[emdash].width, "emdash"
end
if not spaceunits and metadata.charwidth then
- spaceunits, tfm.spacer = metadata.charwidth, "charwidth"
+ spaceunits, spacer = metadata.charwidth, "charwidth"
end
else
if descriptions[endash] then
- spaceunits, tfm.spacer = descriptions[endash].width, "space"
+ spaceunits, spacer = descriptions[endash].width, "space"
end
if not spaceunits and descriptions[emdash] then
- spaceunits, tfm.spacer = descriptions[emdash].width/2, "emdash/2"
+ spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
end
if not spaceunits and metadata.charwidth then
- spaceunits, tfm.spacer = metadata.charwidth, "charwidth"
+ spaceunits, spacer = metadata.charwidth, "charwidth"
end
end
spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr
+ -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
+ local filename = fonts.tfm.checked_filename(luatex)
+ local fontname = metadata.fontname
+ local fullname = metadata.fullname or fontname
+ local cidinfo = data.cidinfo
+ local units = metadata.units_per_em or 1000
+ --
+ cidinfo.registry = cidinfo and cidinfo.registry or "" -- weird here, fix upstream
+ --
parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = tfm.units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*tfm.units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*tfm.units/5 -- 400
- parameters.quad = tfm.units -- 1000
- if spaceunits < 2*tfm.units/5 then
+ parameters.space = spaceunits -- 3.333 (cmr10)
+ parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
+ parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
+ parameters.x_height = 2*units/5 -- 400
+ parameters.quad = units -- 1000
+ if spaceunits < 2*units/5 then
-- todo: warning
end
local italicangle = metadata.italicangle
- tfm.ascender = math.abs(metadata.ascent or 0)
- tfm.descender = math.abs(metadata.descent or 0)
if italicangle then -- maybe also in afm _
- tfm.italicangle = italicangle
parameters.slant = parameters.slant - math.round(math.tan(italicangle*math.pi/180))
end
if metadata.isfixedpitch then
@@ -6516,8 +7478,34 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
end
end
end
- -- [6]
- return tfm
+ --
+ return {
+ characters = characters,
+ parameters = parameters,
+ math_parameters = math_parameters,
+ descriptions = descriptions,
+ indices = indices,
+ unicodes = unicodes,
+ type = "real",
+ direction = 0,
+ boundarychar_label = 0,
+ boundarychar = 65536,
+ designsize = (designsize/10)*65536,
+ spacer = "500 units",
+ encodingbytes = 2,
+ filename = filename,
+ fontname = fontname,
+ fullname = fullname,
+ psname = fontname or fullname,
+ name = filename or fullname,
+ units = units,
+ format = fonts.fontformat(filename,"opentype"),
+ cidinfo = cidinfo,
+ ascender = abs(metadata.ascent or 0),
+ descender = abs(metadata.descent or 0),
+ spacer = spacer,
+ italicangle = italicangle,
+ }
else
return nil
end
@@ -6534,6 +7522,7 @@ function tfm.read_from_open_type(specification)
local s = specification.size
local m = otfdata.metadata.math
if m then
+ -- this will move to a function
local f = specification.features
if f then
local f = f.normal
@@ -6561,25 +7550,16 @@ function tfm.read_from_open_type(specification)
end
end
end
- tfmtable = tfm.scale(tfmtable,s)
- -- here we resolve the name; file can be relocated, so this info is not in the cache
- local filename = (otfdata and otfdata.luatex and otfdata.luatex.filename) or specification.filename
- if not filename then
- -- try to locate anyway and set otfdata.luatex.filename
- end
- if filename then
- tfmtable.encodingbytes = 2
- tfmtable.filename = resolvers.findbinfile(filename,"") or filename
- tfmtable.fullname = tfmtable.fullname or otfdata.metadata.fontname or otfdata.metadata.fullname
- local order = otfdata and otfdata.metadata.order2
- if order == 0 then
- tfmtable.format = 'opentype'
- elseif order == 1 then
- tfmtable.format = 'truetype'
- else
- tfmtable.format = specification.format
+ tfmtable = tfm.scale(tfmtable,s,specification.relativeid)
+ if tfm.fontname_mode == "specification" then
+ -- not to be used in context !
+ local specname = specification.specification
+ if specname then
+ tfmtable.name = specname
+ if trace_defining then
+ logs.report("define font","overloaded fontname: '%s'",specname)
+ end
end
- tfmtable.name = tfmtable.filename or tfmtable.fullname
end
fonts.logger.save(tfmtable,file.extname(specification.filename),specification)
end
@@ -6587,13 +7567,46 @@ function tfm.read_from_open_type(specification)
return tfmtable
end
+-- helpers
+
+function otf.collect_lookups(otfdata,kind,script,language)
+ -- maybe store this in the font
+ local sequences = otfdata.luatex.sequences
+ if sequences then
+ local featuremap, featurelist = { }, { }
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local features = sequence.features
+ features = features and features[kind]
+ features = features and (features[script] or features[default] or features[wildcard])
+ features = features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables = sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss = subtables[s]
+                    if not featuremap[ss] then
+ featuremap[ss] = true
+ featurelist[#featurelist+1] = ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist > 0 then
+ return featuremap, featurelist
+ end
+ end
+ return nil, nil
+end
+
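-- Editorial note (not from the upstream sources): the base mode preparation functions
-- below call this helper as
--   local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,script,language)
-- and only proceed when validlookups is non nil; featuremap is a set keyed by subtable
-- name and featurelist lists those names in sequence order.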
end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-otd'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -6676,7 +7689,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-oti'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -6738,7 +7751,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-otb'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -6747,6 +7760,7 @@ if not modules then modules = { } end modules ['font-otb'] = {
local concat = table.concat
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
local otf = fonts.otf
local tfm = fonts.tfm
@@ -6807,7 +7821,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
for k,v in next, ligatures do
local lig = v[1]
if not done[lig] then
- local ligs = split_at_space:match(lig)
+ local ligs = lpegmatch(split_at_space,lig)
if #ligs == 2 then
local uc = v[2]
local c, f, s = characters[uc], ligs[1], ligs[2]
@@ -6871,43 +7885,12 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
end
end
-local function collect_lookups(otfdata,kind,script,language)
- -- maybe store this in the font
- local sequences = otfdata.luatex.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[s] then
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
-end
-
local splitter = lpeg.splitat(" ")
function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local ligatures = { }
local unicodes = tfmdata.unicodes -- names to unicodes
@@ -6915,9 +7898,84 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local changed = tfmdata.changed
+ --
+ local actions = {
+ substitution = function(p,lookup,k,glyph,unicode)
+ local pv = p[2] -- p.variant
+ if pv then
+ local upv = unicodes[pv]
+ if upv then
+ if type(upv) == "table" then
+ upv = upv[1]
+ end
+ if characters[upv] then
+ if trace_baseinit and trace_singles then
+ logs.report("define otf","%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
+ end
+ changed[k] = upv
+ end
+ end
+ end
+ end,
+ alternate = function(p,lookup,k,glyph,unicode)
+ local pc = p[2] -- p.components
+ if pc then
+                    -- a bit optimized ugliness: value 1 takes the first name, value 2 the second (or the only one), anything else the value-th or the last
+ if value == 1 then
+ pc = lpegmatch(splitter,pc)
+ elseif value == 2 then
+ local a, b = lpegmatch(splitter,pc)
+ pc = b or a
+ else
+ pc = { lpegmatch(splitter,pc) }
+ pc = pc[value] or pc[#pc]
+ end
+ if pc then
+ local upc = unicodes[pc]
+ if upc then
+ if type(upc) == "table" then
+ upc = upc[1]
+ end
+ if characters[upc] then
+ if trace_baseinit and trace_alternatives then
+ logs.report("define otf","%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
+ end
+ changed[k] = upc
+ end
+ end
+ end
+ end
+ end,
+ ligature = function(p,lookup,k,glyph,unicode)
+ local pc = p[2]
+ if pc then
+ if trace_baseinit and trace_ligatures then
+ local upc = { lpegmatch(splitter,pc) }
+ for i=1,#upc do upc[i] = unicodes[upc[i]] end
+                    -- we assume that it's not a table
+ logs.report("define otf","%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
+ end
+ ligatures[#ligatures+1] = { pc, k }
+ end
+ end,
+ }
+ --
for k,c in next, characters do
local glyph = descriptions[k]
- local lookups = glyph.lookups
+ local lookups = glyph.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local p = lookups[lookup]
+ if p then
+ local a = actions[p[1]]
+ if a then
+ a(p,lookup,k,glyph,unicode)
+ end
+ end
+ end
+ end
+ local lookups = glyph.mlookups
if lookups then
for l=1,#lookuplist do
local lookup = lookuplist[l]
@@ -6925,62 +7983,9 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
if ps then
for i=1,#ps do
local p = ps[i]
- local t = p[1]
- if t == 'substitution' then
- local pv = p[2] -- p.variant
- if pv then
- local upv = unicodes[pv]
- if upv then
- if type(upv) == "table" then
- upv = upv[1]
- end
- if characters[upv] then
- if trace_baseinit and trace_singles then
- logs.report("define otf","%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
- end
- changed[k] = upv
- end
- end
- end
- elseif t == 'alternate' then
- local pc = p[2] -- p.components
- if pc then
- -- a bit optimized ugliness
- if value == 1 then
- pc = splitter:match(pc)
- elseif value == 2 then
- local a, b = splitter:match(pc)
- pc = b or a
- else
- pc = { splitter:match(pc) }
- pc = pc[value] or pc[#pc]
- end
- if pc then
- local upc = unicodes[pc]
- if upc then
- if type(upc) == "table" then
- upc = upc[1]
- end
- if characters[upc] then
- if trace_baseinit and trace_alternatives then
- logs.report("define otf","%s: base alternate %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upc))
- end
- changed[k] = upc
- end
- end
- end
- end
- elseif t == 'ligature' and not changed[k] then
- local pc = p[2]
- if pc then
- if trace_baseinit and trace_ligatures then
- local upc = { splitter:match(pc) }
- for i=1,#upc do upc[i] = unicodes[upc[i]] end
- -- we assume that it's no table
- logs.report("define otf","%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
- end
- ligatures[#ligatures+1] = { pc, k }
- end
+ local a = actions[p[1]]
+ if a then
+ a(p,lookup,k,glyph,unicode)
end
end
end
@@ -6997,37 +8002,46 @@ end
local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local unicodes = tfmdata.unicodes -- names to unicodes
local indices = tfmdata.indices
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
+ local sharedkerns = { }
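            -- Editorial note (not from the upstream sources): sharedkerns memoizes on the
            -- identity of the (shared) mykerns table, so glyphs that reference the same
            -- kern set reuse one computed table, while a set that produced nothing is
            -- remembered as false and skipped the next time.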
for u, chr in next, characters do
local d = descriptions[u]
if d then
- local dk = d.mykerns
+ local dk = d.mykerns -- shared
if dk then
- local t, done = chr.kerns or { }, false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = dk[lookup]
- if kerns then
- for k, v in next, kerns do
- if v ~= 0 and not t[k] then -- maybe no 0 test here
- t[k], done = v, true
- if trace_baseinit and trace_kerns then
- logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+ local s = sharedkerns[dk]
+ if s == false then
+ -- skip
+ elseif s then
+ chr.kerns = s
+ else
+ local t, done = chr.kerns or { }, false
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local kerns = dk[lookup]
+ if kerns then
+ for k, v in next, kerns do
+ if v ~= 0 and not t[k] then -- maybe no 0 test here
+ t[k], done = v, true
+ if trace_baseinit and trace_kerns then
+ logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+ end
end
end
end
end
+ if done then
+ sharedkerns[dk] = t
+ chr.kerns = t -- no empty assignments
+ else
+ sharedkerns[dk] = false
+ end
end
- if done then
- chr.kerns = t -- no empty assignments
- end
- -- elseif d.kerns then
- -- logs.report("define otf","%s: invalid mykerns for %s",cref(kind),gref(descriptions,u))
end
end
end
@@ -7042,12 +8056,13 @@ end
-- to do complete mixed runs and not run featurewise (as we did before).
local supported_gsub = {
- 'liga','dlig','rlig','hlig',
- 'pnum','onum','tnum','lnum',
+ 'liga', 'dlig', 'rlig', 'hlig',
+ 'pnum', 'onum', 'tnum', 'lnum',
'zero',
- 'smcp','cpsp','c2sc','ornm','aalt',
- 'hwid','fwid',
- 'ssty', -- math
+ 'smcp', 'cpsp', 'c2sc', 'ornm', 'aalt',
+ 'hwid', 'fwid',
+ 'ssty', 'rtlm', -- math
+-- 'tlig', 'trep',
}
local supported_gpos = {
@@ -7114,7 +8129,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-otn'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -7124,6 +8139,11 @@ if not modules then modules = { } end modules ['font-otn'] = {
-- much functionality could only be implemented thanks to the husayni font
-- of Idris Samawi Hamid, to whom we dedicate this module.
+-- I'm in the process of cleaning up the code (which happens in another
+-- file) so don't rely on things staying the same.
+
+-- some day when we can jit this, we can use more functions
+
-- we can use more lpegs when lpeg is extended with function args and so
-- resolving to unicode does not gain much
@@ -7229,9 +8249,10 @@ results in different tables.</p>
-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
-- remark: we need to check what to do with discretionaries
-local concat = table.concat
+local concat, insert, remove = table.concat, table.insert, table.remove
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
local otf = fonts.otf
local tfm = fonts.tfm
@@ -7250,6 +8271,8 @@ local trace_bugs = false trackers.register("otf.bugs", function
local trace_details = false trackers.register("otf.details", function(v) trace_details = v end)
local trace_applied = false trackers.register("otf.applied", function(v) trace_applied = v end)
local trace_steps = false trackers.register("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false trackers.register("otf.skips", function(v) trace_skips = v end)
+local trace_directions = false trackers.register("otf.directions", function(v) trace_directions = v end)
trackers.register("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
trackers.register("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
@@ -7273,7 +8296,7 @@ local zwj = 0x200D
local wildcard = "*"
local default = "dflt"
-local split_at_space = lpeg.Ct(lpeg.splitat(" ")) -- no trailing or multiple spaces anyway
+local split_at_space = lpeg.splitters[" "] or lpeg.Ct(lpeg.splitat(" ")) -- no trailing or multiple spaces anyway
local glyph = node.id('glyph')
local glue = node.id('glue')
@@ -7415,16 +8438,30 @@ end
local function toligature(kind,lookupname,start,stop,char,markflag,discfound) -- brr head
if start ~= stop then
+--~ if discfound then
+--~ local lignode = copy_node(start)
+--~ lignode.font = start.font
+--~ lignode.char = char
+--~ lignode.subtype = 2
+--~ start = node.do_ligature_n(start, stop, lignode)
+--~ if start.id == disc then
+--~ local prev = start.prev
+--~ start = start.next
+--~ end
if discfound then
+ -- print("start->stop",nodes.tosequence(start,stop))
local lignode = copy_node(start)
- lignode.font = start.font
- lignode.char = char
- lignode.subtype = 2
- start = node.do_ligature_n(start, stop, lignode)
- if start.id == disc then
- local prev = start.prev
- start = start.next
- end
+ lignode.font, lignode.char, lignode.subtype = start.font, char, 2
+ local next, prev = stop.next, start.prev
+ stop.next = nil
+ lignode = node.do_ligature_n(start, stop, lignode)
+ prev.next = lignode
+ if next then
+ next.prev = lignode
+ end
+ lignode.next, lignode.prev = next, prev
+ start = lignode
+ -- print("start->end",nodes.tosequence(start))
else -- start is the ligature
local deletemarks = markflag ~= "mark"
local n = copy_node(start)
@@ -7499,16 +8536,19 @@ local function alternative_glyph(start,alternatives,kind,chainname,chainlookupna
value, choice = format("first, choice %s",1), alternatives[1]
elseif value == "last" then
value, choice = format("last, choice %s",n), alternatives[n]
- elseif type(value) ~= "number" then
- value, choice = "default, choice 1", alternatives[1]
- elseif value > n then
- value, choice = format("no %s variants, taking %s",value,n), alternatives[n]
- elseif value == 0 then
- value, choice = format("choice %s (no change)",value), start.char
- elseif value < 1 then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
else
- value, choice = format("choice %s",value), alternatives[value]
+ value = tonumber(value)
+ if type(value) ~= "number" then
+ value, choice = "default, choice 1", alternatives[1]
+ elseif value > n then
+ value, choice = format("no %s variants, taking %s",value,n), alternatives[n]
+ elseif value == 0 then
+ value, choice = format("choice %s (no change)",value), start.char
+ elseif value < 1 then
+ value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
+ else
+ value, choice = format("choice %s",value), alternatives[value]
+ end
end
if not choice then
logwarning("%s: no variant %s for %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(start.char))
@@ -7550,7 +8590,8 @@ end
function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence) --or maybe pass lookup ref
local s, stop, discfound = start.next, nil, false
- if marks[start.char] then
+ local startchar = start.char
+ if marks[startchar] then
while s do
local id = s.id
if id == glyph and s.subtype<256 then
@@ -7573,7 +8614,7 @@ function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence) --or ma
end
if stop and ligature[2] then
if trace_ligatures then
- local startchar, stopchar = start.char, stop.char
+ local stopchar = stop.char
start = markstoligature(kind,lookupname,start,stop,ligature[2])
logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
else
@@ -7612,7 +8653,7 @@ function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence) --or ma
end
if stop and ligature[2] then
if trace_ligatures then
- local startchar, stopchar = start.char, stop.char
+ local stopchar = stop.char
start = toligature(kind,lookupname,start,stop,ligature[2],skipmark,discfound)
logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
else
@@ -7838,7 +8879,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
if exit then
local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound)
+ logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
done = true
break
@@ -7866,9 +8907,9 @@ end
function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
local startchar = start.char
- local dx, dy = set_pair(start,tfmdata.factor,rlmode,kerns,characters[startchar])
+ local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy)
+ logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
end
return start, false
end
@@ -7897,14 +8938,14 @@ local krn = kerns[nextchar]
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,a,characters[startchar])
+ local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,b,characters[nextchar])
+ local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -8490,7 +9531,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
if exit then
local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound)
+ logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
done = true
break
@@ -8518,7 +9559,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
return start, false
end
-function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
+function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,chainindex,sequence)
-- untested
local startchar = start.char
local subtables = currentlookup.subtables
@@ -8527,9 +9568,9 @@ function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,c
if kerns then
kerns = kerns[startchar]
if kerns then
- local dx, dy = set_pair(start,tfmdata.factor,rlmode,kerns,characters[startchar])
+ local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy)
+ logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
end
end
end
@@ -8538,7 +9579,7 @@ end
-- when machines become faster i will make a shared function
-function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
+function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,chainindex,sequence)
-- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname))
local snext = start.next
if snext then
@@ -8553,12 +9594,11 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
local factor = tfmdata.factor
while snext and snext.id == glyph and snext.subtype<256 and snext.font == currentfont do
local nextchar = snext.char
-local krn = kerns[nextchar]
+ local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
snext = snext.next
else
---~ local krn = kerns[nextchar]
if not krn then
-- skip
elseif type(krn) == "table" then
@@ -8566,14 +9606,14 @@ local krn = kerns[nextchar]
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,a,characters[startchar])
+ local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,b,characters[nextchar])
+ local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -8617,19 +9657,30 @@ end
-- we don't need to pass the currentcontext, saves a bit
-- make a slow variant then can be activated but with more tracing
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s (%s=>%s)",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+
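-- Editorial note (not from the upstream sources): each context ck is built as
-- { rulenumber, lookuptype, sequence of coverage sets, first, last, lookups } (see the
-- commented assignment at the top of the next function and the construction in the
-- "glyphs" coverage branch further down), with replacements in slot 7 when present;
-- positions first..last mark the part the lookups act on, the rest is before/after
-- context.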
local function normal_handle_contextchain(start,kind,chainname,contexts,sequence,cache)
-- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
local flags, done = sequence.flags, false
local skipmark, skipligature, skipbase = flags[1], flags[2], flags[3]
local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
+ local markclass = sequence.markclass -- todo, first we need a proper test
+ local skipped = false
for k=1,#contexts do
local match, current, last = true, start, start
local ck = contexts[k]
- local sequence = ck[3]
- local s = #sequence
+ local seq = ck[3]
+ local s = #seq
+ -- f..l = mid string
if s == 1 then
-- never happens
- match = current.id == glyph and current.subtype<256 and current.font == currentfont and sequence[1][current.char]
+ match = current.id == glyph and current.subtype<256 and current.font == currentfont and seq[1][current.char]
else
-- todo: better space check (maybe check for glue)
local f, l = ck[4], ck[5]
@@ -8643,7 +9694,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
-- we cannot optimize for n=2 because there can be disc nodes
-- if not someskip and n == l then
-- -- n=2 and no skips then faster loop
- -- match = last and last.id == glyph and last.subtype<256 and last.font == currentfont and sequence[n][last.char]
+ -- match = last and last.id == glyph and last.subtype<256 and last.font == currentfont and seq[n][last.char]
-- else
while n <= l do
if last then
@@ -8654,11 +9705,13 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local ccd = descriptions[char]
if ccd then
local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase then
---~ if someskip and class == skipmark or class == skipligature or class == skipbase then
- -- skip 'm
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
last = last.next
- elseif sequence[n][char] then
+ elseif seq[n][char] then
if n < l then
last = last.next
end
@@ -8684,6 +9737,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
-- end
end
if match and f > 1 then
+ -- before
local prev = start.prev
if prev then
local n = f-1
@@ -8696,10 +9750,12 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local ccd = descriptions[char]
if ccd then
local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase then
---~ if someskip and class == skipmark or class == skipligature or class == skipbase then
- -- skip 'm
- elseif sequence[n][char] then
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
n = n -1
else
match = false break
@@ -8712,32 +9768,33 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
elseif id == disc then
-- skip 'm
- elseif sequence[n][32] then
+ elseif seq[n][32] then
n = n -1
else
match = false break
end
prev = prev.prev
- elseif sequence[n][32] then
+ elseif seq[n][32] then
n = n -1
else
match = false break
end
end
elseif f == 2 then
- match = sequence[1][32]
+ match = seq[1][32]
else
                    for n=f-1,1,-1 do
- if not sequence[n][32] then
+ if not seq[n][32] then
match = false break
end
end
end
end
if match and s > l then
+ -- after
local current = last.next
if current then
- -- removed optimiziation for s-l == 1, we have to deal with marks anyway
+ -- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
while n <= s do
if current then
@@ -8748,10 +9805,12 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local ccd = descriptions[char]
if ccd then
local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase then
---~ if someskip and class == skipmark or class == skipligature or class == skipbase then
- -- skip 'm
- elseif sequence[n][char] then
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
n = n + 1
else
match = false break
@@ -8764,23 +9823,23 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
elseif id == disc then
-- skip 'm
- elseif sequence[n][32] then -- brrr
+ elseif seq[n][32] then -- brrr
n = n + 1
else
match = false break
end
current = current.next
- elseif sequence[n][32] then
+ elseif seq[n][32] then
n = n + 1
else
match = false break
end
end
elseif s-l == 1 then
- match = sequence[s][32]
+ match = seq[s][32]
else
for n=l+1,s do
- if not sequence[n][32] then
+ if not seq[n][32] then
match = false break
end
end
@@ -8790,7 +9849,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
if match then
-- ck == currentcontext
if trace_contexts then
- local rule, lookuptype, sequence, f, l = ck[1], ck[2] ,ck[3], ck[4], ck[5]
+ local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
local char = start.char
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -8807,39 +9866,79 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local chainlookup = lookuptable[chainlookupname]
local cp = chainprocs[chainlookup.type]
if cp then
- start, done = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname)
+ start, done = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname,nil,sequence)
else
logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
end
else
-- actually this needs a more complex treatment for which we will use chainmores
+--~ local i = 1
+--~ repeat
+--~ local chainlookupname = chainlookups[i]
+--~ local chainlookup = lookuptable[chainlookupname]
+--~ local cp = chainmores[chainlookup.type]
+--~ if cp then
+--~ local ok, n
+--~ start, ok, n = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname,i,sequence)
+--~ -- messy since last can be changed !
+--~ if ok then
+--~ done = true
+--~ start = start.next
+--~ if n then
+--~ -- skip next one(s) if ligature
+--~ i = i + n - 1
+--~ end
+--~ end
+--~ else
+--~ logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+--~ end
+--~ i = i + 1
+--~ until i > nofchainlookups
+
local i = 1
repeat
+                        if skipped then
+                            while true do
+                                local char = start.char
+                                local ccd = descriptions[char]
+                                if ccd then
+                                    local class = ccd.class
+                                    if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+                                        start = start.next
+                                    else
+                                        break
+                                    end
+                                else
+                                    break
+                                end
+                            end
+                        end
local chainlookupname = chainlookups[i]
local chainlookup = lookuptable[chainlookupname]
local cp = chainmores[chainlookup.type]
if cp then
local ok, n
- start, ok, n = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname,i)
+ start, ok, n = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname,i,sequence)
-- messy since last can be changed !
if ok then
done = true
- start = start.next
- if n then
- -- skip next one(s) if ligature
- i = i + n - 1
- end
+ -- skip next one(s) if ligature
+ i = i + (n or 1)
+ else
+ i = i + 1
end
else
logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i = i + 1
end
- i = i + 1
+ start = start.next
until i > nofchainlookups
+
end
else
local replacements = ck[7]
if replacements then
- start, done = chainprocs.reversesub(start,last,kind,chainname,ck,cache,replacements)
+ start, done = chainprocs.reversesub(start,last,kind,chainname,ck,cache,replacements) -- sequence
else
done = true -- can be meant to be skipped
if trace_contexts then
@@ -8911,6 +10010,8 @@ end
local resolved = { } -- we only resolve a font,script,language pair once
+-- todo: pass all these 'locals' in a table
+
function fonts.methods.node.otf.features(head,font,attr)
if trace_steps then
checkstep(head)
@@ -8954,6 +10055,7 @@ function fonts.methods.node.otf.features(head,font,attr)
local ra = rl [attr] if ra == nil then ra = { } rl [attr] = ra end -- attr can be false
-- sequences always > 1 so no need for optimization
for s=1,#sequences do
+ local pardir, txtdir = 0, { }
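            -- Editorial note (not from the upstream sources): pardir holds the paragraph
            -- direction set by dir whatsits of subtype 6 ("TRT"/"TLT"), txtdir is a stack
            -- of local text directions pushed and popped by subtype 7 ("+TRT"/"+TLT" push,
            -- "-TRT"/"-TLT" pop); rlmode becomes -1 for right-to-left, 1 for left-to-right,
            -- and falls back to pardir when the stack is empty.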
local success = false
local sequence = sequences[s]
local r = ra[s] -- cache
@@ -8979,12 +10081,10 @@ function fonts.methods.node.otf.features(head,font,attr)
-- only first attribute match check, so we assume simple fina's
-- default can become a font feature itself
if l[language] then
---~ valid, what = true, language
valid, what = s_e or a_e, language
-- elseif l[default] then
-- valid, what = true, default
elseif l[wildcard] then
---~ valid, what = true, wildcard
valid, what = s_e or a_e, wildcard
end
if valid then
@@ -9019,12 +10119,12 @@ function fonts.methods.node.otf.features(head,font,attr)
local handler = handlers[typ]
local thecache = featuredata[typ] or { }
-- we need to get rid of this slide !
- start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
while start do
local id = start.id
if id == glyph then
---~ if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) then
- if start.subtype<256 and start.font == font and has_attribute(start,0,attr) then
+ if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) then
+--~ if start.subtype<256 and start.font == font and has_attribute(start,0,attr) then
for i=1,#subtables do
local lookupname = subtables[i]
local lookupcache = thecache[lookupname]
@@ -9052,8 +10152,8 @@ function fonts.methods.node.otf.features(head,font,attr)
local handler = handlers[typ]
local ns = #subtables
local thecache = featuredata[typ] or { }
- start = head -- local ?
- rlmode = 0
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
if ns == 1 then
local lookupname = subtables[1]
local lookupcache = thecache[lookupname]
@@ -9063,8 +10163,8 @@ function fonts.methods.node.otf.features(head,font,attr)
while start do
local id = start.id
if id == glyph then
---~ if start.font == font and start.subtype<256 and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
- if start.font == font and start.subtype<256 and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
+--~ if start.font == font and start.subtype<256 and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
+ if start.font == font and start.subtype<256 and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
local lookupmatch = lookupcache[start.char]
if lookupmatch then
                            -- sequence can be dropped here
@@ -9097,21 +10197,35 @@ function fonts.methods.node.otf.features(head,font,attr)
local subtype = start.subtype
if subtype == 7 then
local dir = start.dir
- if dir == "+TRT" then
+ if dir == "+TRT" or dir == "+TLT" then
+ insert(txtdir,dir)
+ elseif dir == "-TRT" or dir == "-TLT" then
+ remove(txtdir)
+ end
+ local d = txtdir[#txtdir]
+ if d == "+TRT" then
rlmode = -1
- elseif dir == "+TLT" then
+ elseif d == "+TLT" then
rlmode = 1
else
- rlmode = 0
+ rlmode = pardir
+ end
+ if trace_directions then
+ logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
elseif subtype == 6 then
local dir = start.dir
if dir == "TRT" then
- rlmode = -1
+ pardir = -1
elseif dir == "TLT" then
- rlmode = 1
+ pardir = 1
else
- rlmode = 0
+ pardir = 0
+ end
+ rlmode = pardir
+ --~ txtdir = { }
+ if trace_directions then
+ logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -9119,14 +10233,13 @@ function fonts.methods.node.otf.features(head,font,attr)
start = start.next
end
end
-
end
else
while start do
local id = start.id
if id == glyph then
---~ if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
- if start.subtype<256 and start.font == font and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
+ if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
+--~ if start.subtype<256 and start.font == font and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = thecache[lookupname]
@@ -9166,23 +10279,38 @@ function fonts.methods.node.otf.features(head,font,attr)
-- end
elseif id == whatsit then
local subtype = start.subtype
if subtype == 7 then
local dir = start.dir
- if dir == "+TRT" then
+ if dir == "+TRT" or dir == "+TLT" then
+ insert(txtdir,dir)
+ elseif dir == "-TRT" or dir == "-TLT" then
+ remove(txtdir)
+ end
+ local d = txtdir[#txtdir]
+ if d == "+TRT" then
rlmode = -1
- elseif dir == "+TLT" then
+ elseif d == "+TLT" then
rlmode = 1
else
- rlmode = 0
+ rlmode = pardir
+ end
+ if trace_directions then
+ logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
elseif subtype == 6 then
local dir = start.dir
if dir == "TRT" then
- rlmode = -1
+ pardir = -1
elseif dir == "TLT" then
- rlmode = 1
+ pardir = 1
else
- rlmode = 0
+ pardir = 0
+ end
+ rlmode = pardir
+ --~ txtdir = { }
+ if trace_directions then
+ logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -9210,11 +10338,11 @@ otf.features.prepare = { }
-- document)
local function split(replacement,original,cache,unicodes)
- -- we can cache this too, but not the same
+ -- we can cache this too, but not the same (although unicode is a unique enough hash)
local o, t, n = { }, { }, 0
for s in gmatch(original,"[^ ]+") do
local us = unicodes[s]
- if type(us) == "number" then
+ if type(us) == "number" then -- tonumber(us)
o[#o+1] = us
else
o[#o+1] = us[1]
@@ -9223,7 +10351,7 @@ local function split(replacement,original,cache,unicodes)
for s in gmatch(replacement,"[^ ]+") do
n = n + 1
local us = unicodes[s]
- if type(us) == "number" then
+ if type(us) == "number" then -- tonumber(us)
t[o[n]] = us
else
t[o[n]] = us[1]
@@ -9280,142 +10408,153 @@ local function prepare_lookups(tfmdata)
-- we can change the otf table after loading but then we need to adapt base mode
-- as well (no big deal)
--
- for unicode, glyph in next, descriptions do
- local lookups = glyph.lookups
- if lookups then
- for lookup, whatever in next, lookups do
- for i=1,#whatever do -- normaly one
- local p = whatever[i]
- local what = p[1]
- if what == 'substitution' then
- local old, new = unicode, unicodes[p[2]]
- if type(new) == "table" then
- new = new[1]
- end
- local s = single[lookup]
- if not s then s = { } single[lookup] = s end
- s[old] = new
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: substitution %s => %s",lookup,old,new)
---~ end
- break
- elseif what == 'multiple' then
- local old, new = unicode, { }
- local m = multiple[lookup]
- if not m then m = { } multiple[lookup] = m end
- m[old] = new
- for pc in gmatch(p[2],"[^ ]+") do
- local upc = unicodes[pc]
- if type(upc) == "number" then
- new[#new+1] = upc
- else
- new[#new+1] = upc[1]
- end
- end
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: multiple %s => %s",lookup,old,concat(new," "))
---~ end
+ local action = {
+ substitution = function(p,lookup,k,glyph,unicode)
+ local old, new = unicode, unicodes[p[2]]
+ if type(new) == "table" then
+ new = new[1]
+ end
+ local s = single[lookup]
+ if not s then s = { } single[lookup] = s end
+ s[old] = new
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: substitution %s => %s",lookup,old,new)
+ --~ end
+ end,
+ multiple = function (p,lookup,k,glyph,unicode)
+ local old, new = unicode, { }
+ local m = multiple[lookup]
+ if not m then m = { } multiple[lookup] = m end
+ m[old] = new
+ for pc in gmatch(p[2],"[^ ]+") do
+ local upc = unicodes[pc]
+ if type(upc) == "number" then
+ new[#new+1] = upc
+ else
+ new[#new+1] = upc[1]
+ end
+ end
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: multiple %s => %s",lookup,old,concat(new," "))
+ --~ end
+ end,
+ alternate = function(p,lookup,k,glyph,unicode)
+ local old, new = unicode, { }
+ local a = alternate[lookup]
+ if not a then a = { } alternate[lookup] = a end
+ a[old] = new
+ for pc in gmatch(p[2],"[^ ]+") do
+ local upc = unicodes[pc]
+ if type(upc) == "number" then
+ new[#new+1] = upc
+ else
+ new[#new+1] = upc[1]
+ end
+ end
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
+ --~ end
+ end,
+ ligature = function (p,lookup,k,glyph,unicode)
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
+ --~ end
+ local first = true
+ local t = ligature[lookup]
+ if not t then t = { } ligature[lookup] = t end
+ for s in gmatch(p[2],"[^ ]+") do
+ if first then
+ local u = unicodes[s]
+ if not u then
+ logs.report("define otf","lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
break
- elseif what == 'alternate' then
- local old, new = unicode, { }
- local a = alternate[lookup]
- if not a then a = { } alternate[lookup] = a end
- a[old] = new
- for pc in gmatch(p[2],"[^ ]+") do
- local upc = unicodes[pc]
- if type(upc) == "number" then
- new[#new+1] = upc
- else
- new[#new+1] = upc[1]
- end
+ elseif type(u) == "number" then
+ if not t[u] then
+ t[u] = { { } }
end
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
---~ end
- break
- elseif what == "ligature" then
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
---~ end
- local first = true
- local t = ligature[lookup]
- if not t then t = { } ligature[lookup] = t end
- for s in gmatch(p[2],"[^ ]+") do
- if first then
- local u = unicodes[s]
- if not u then
- logs.report("define otf","lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
- break
- elseif type(u) == "number" then
- if not t[u] then
- t[u] = { { } }
- end
- t = t[u]
- else
- local tt = t
- local tu
- for i=1,#u do
- local u = u[i]
- if i==1 then
- if not t[u] then
- t[u] = { { } }
- end
- tu = t[u]
- t = tu
- else
- if not t[u] then
- tt[u] = tu
- end
- end
- end
+ t = t[u]
+ else
+ local tt = t
+ local tu
+ for i=1,#u do
+ local u = u[i]
+ if i==1 then
+ if not t[u] then
+ t[u] = { { } }
end
- first = false
+ tu = t[u]
+ t = tu
else
- s = unicodes[s]
- local t1 = t[1]
- if not t1[s] then
- t1[s] = { { } }
+ if not t[u] then
+ tt[u] = tu
end
- t = t1[s]
end
end
- t[2] = unicode
- elseif what == 'position' then
- -- not used
- local s = position[lookup]
- if not s then s = { } position[lookup] = s end
- s[unicode] = p[2] -- direct pointer to kern spec
- elseif what == 'pair' then
- local s = pair[lookup]
- if not s then s = { } pair[lookup] = s end
- local others = s[unicode]
- if not others then others = { } s[unicode] = others end
- -- todo: fast check for space
- local two = p[2]
- local upc = unicodes[two]
- if not upc then
- for pc in gmatch(two,"[^ ]+") do
- local upc = unicodes[pc]
- if type(upc) == "number" then
- others[upc] = p -- direct pointer to main table
- else
- for i=1,#upc do
- others[upc[i]] = p -- direct pointer to main table
- end
- end
- end
- elseif type(upc) == "number" then
- others[upc] = p -- direct pointer to main table
- else
- for i=1,#upc do
- others[upc[i]] = p -- direct pointer to main table
- end
+ end
+ first = false
+ else
+ s = unicodes[s]
+ local t1 = t[1]
+ if not t1[s] then
+ t1[s] = { { } }
+ end
+ t = t1[s]
+ end
+ end
+ t[2] = unicode
+ end,
+ position = function(p,lookup,k,glyph,unicode)
+ -- not used
+ local s = position[lookup]
+ if not s then s = { } position[lookup] = s end
+ s[unicode] = p[2] -- direct pointer to kern spec
+ end,
+ pair = function(p,lookup,k,glyph,unicode)
+ local s = pair[lookup]
+ if not s then s = { } pair[lookup] = s end
+ local others = s[unicode]
+ if not others then others = { } s[unicode] = others end
+ -- todo: fast check for space
+ local two = p[2]
+ local upc = unicodes[two]
+ if not upc then
+ for pc in gmatch(two,"[^ ]+") do
+ local upc = unicodes[pc]
+ if type(upc) == "number" then
+ others[upc] = p -- direct pointer to main table
+ else
+ for i=1,#upc do
+ others[upc[i]] = p -- direct pointer to main table
end
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: pair for U+%04X",lookup,unicode)
---~ end
end
end
+ elseif type(upc) == "number" then
+ others[upc] = p -- direct pointer to main table
+ else
+ for i=1,#upc do
+ others[upc[i]] = p -- direct pointer to main table
+ end
+ end
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: pair for U+%04X",lookup,unicode)
+ --~ end
+ end,
+ }
+ --
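    -- Editorial note (not from the upstream sources): slookups carry one specification
    -- per lookup and mlookups a list of them; both are dispatched through the same
    -- action table above, keyed on the specification type (substitution, multiple,
    -- alternate, ligature, position, pair).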
+ for unicode, glyph in next, descriptions do
+ local lookups = glyph.slookups
+ if lookups then
+ for lookup, p in next, lookups do
+ action[p[1]](p,lookup,k,glyph,unicode)
+ end
+ end
+ local lookups = glyph.mlookups
+ if lookups then
+ for lookup, whatever in next, lookups do
+                for i=1,#whatever do -- normally one

+ local p = whatever[i]
+ action[p[1]](p,lookup,k,glyph,unicode)
+ end
end
end
local list = glyph.mykerns
@@ -9424,9 +10563,9 @@ local function prepare_lookups(tfmdata)
local k = kerns[lookup]
if not k then k = { } kerns[lookup] = k end
k[unicode] = krn -- ref to glyph, saves lookup
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: kern for U+%04X",lookup,unicode)
---~ end
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: kern for U+%04X",lookup,unicode)
+ --~ end
end
end
local oanchor = glyph.anchors
@@ -9440,9 +10579,9 @@ local function prepare_lookups(tfmdata)
local f = mark[lookup]
if not f then f = { } mark[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
---~ end
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
+ --~ end
end
end
end
@@ -9454,9 +10593,9 @@ local function prepare_lookups(tfmdata)
local f = cursive[lookup]
if not f then f = { } cursive[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
---~ if trace_lookups then
---~ logs.report("define otf","lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
---~ end
+ --~ if trace_lookups then
+ --~ logs.report("define otf","lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
+ --~ end
end
end
end
@@ -9570,6 +10709,46 @@ function prepare_contextchains(tfmdata)
end
end
end
+ elseif fmt == "glyphs" then
+ if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
+ logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
+ else
+ local contexts = contextchain[lookupname]
+ if not contexts then
+ contexts = { }
+ contextchain[lookupname] = contexts
+ end
+ local t = { }
+ for nofrules=1,#rules do
+                    -- nearly the same as coverage, so we might as well rename it
+ local rule = rules[nofrules]
+ local glyphs = rule.glyphs
+ if glyphs and glyphs.names then
+ local fore, back, names, sequence = glyphs.fore, glyphs.back, glyphs.names, { }
+ if fore and fore ~= "" then
+ fore = lpegmatch(split_at_space,fore)
+ uncover(fore,sequence,cache,unicodes)
+ end
+ local start = #sequence + 1
+ names = lpegmatch(split_at_space,names)
+ uncover(names,sequence,cache,unicodes)
+ local stop = #sequence
+ if back and back ~= "" then
+ back = lpegmatch(split_at_space,back)
+ uncover(back,sequence,cache,unicodes)
+ end
+ if sequence[1] then
+ t[#t+1] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
+ end
+ end
+ end
+ end
+ end
+ end
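+            -- each entry of t is { rulenumber, lookuptype, sequence, start, stop, lookups };
+            -- sequence[start..stop] covers the 'names' part, the rest the fore/back context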
end
end
end
@@ -9650,10 +10829,6 @@ local penalty = node.id('penalty')
local set_attribute = node.set_attribute
local has_attribute = node.has_attribute
local traverse_id = node.traverse_id
-local delete_node = nodes.delete
-local replace_node = nodes.replace
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
local traverse_node_list = node.traverse
local fontdata = fonts.ids
@@ -9669,7 +10844,6 @@ local a_to_language = otf.a_to_language
-- font related value, but then we also need dynamic features which is
-- somewhat slower; and .. we need a chain of them
-
function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
if attr and attr > 0 then
script, language = a_to_script[attr], a_to_language[attr]
@@ -9746,7 +10920,8 @@ local isol_fina = {
[0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
[0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
[0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true,
+ [0x0778] = true, [0x0779] = true, [0xFEF5] = true, [0xFEF7] = true,
+ [0xFEF9] = true, [0xFEFB] = true,
}
local isol_fina_medi_init = {
@@ -9807,8 +10982,6 @@ function fonts.analyzers.methods.nocolor(head,font,attr)
return head, true
end
-otf.remove_joiners = false -- true -- for idris who want it as option
-
local function finish(first,last)
if last then
if first == last then
@@ -9854,22 +11027,10 @@ function fonts.analyzers.methods.arab(head,font,attr) -- maybe make a special ve
local tfmdata = fontdata[font]
local marks = tfmdata.marks
local first, last, current, done = nil, nil, head, false
- local joiners, nonjoiners
- local removejoiners = tfmdata.remove_joiners -- or otf.remove_joiners
- if removejoiners then
- joiners, nonjoiners = { }, { }
- end
while current do
if current.id == glyph and current.subtype<256 and current.font == font and not has_attribute(current,state) then
done = true
local char = current.char
- if removejoiners then
- if char == zwj then
- joiners[#joiners+1] = current
- elseif char == zwnj then
- nonjoiners[#nonjoiners+1] = current
- end
- end
if marks[char] then
set_attribute(current,state,5) -- mark
if trace_analyzing then fcs(current,"font:mark") end
@@ -9915,25 +11076,9 @@ function fonts.analyzers.methods.arab(head,font,attr) -- maybe make a special ve
current = current.next
end
first, last = finish(first,last)
- if removejoiners then
- for i=1,#joiners do
- head = delete_node(head,joiners[i])
- end
- for i=1,#nonjoiners do
- head = replace_node(head,nonjoiners[i],nodes.glue(0)) -- or maybe a kern
- end
- end
return head, done
end
-table.insert(fonts.manipulators,"joiners")
-
-function fonts.initializers.node.otf.joiners(tfmdata,value)
- if value == "strip" then
- tfmdata.remove_joiners = true
- end
-end
-
end -- closure
do -- begin closure to overcome local limits and interference
@@ -9949,8 +11094,6 @@ if not modules then modules = { } end modules ['font-otc'] = {
local format, insert = string.format, table.insert
local type, next = type, next
-local ctxcatcodes = tex.ctxcatcodes
-
-- we assume that the other otf stuff is loaded already
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
@@ -9961,6 +11104,8 @@ local tfm = fonts.tfm
-- instead of "script = "DFLT", langs = { 'dflt' }" we now use wildcards (we used to
-- have always); some day we can write a "force always when true" trick for other
-- features as well
+--
+-- we could have a tnum variant as well
local extra_lists = {
tlig = {
@@ -10152,38 +11297,13 @@ fonts.initializers.node.otf.lineheight = fonts.initializers.common.lineheight
fonts.initializers.base.otf.compose = fonts.initializers.common.compose
fonts.initializers.node.otf.compose = fonts.initializers.common.compose
--- bonus function
-
-function otf.name_to_slot(name) -- todo: afm en tfm
- local tfmdata = fonts.ids[font.current()]
- if tfmdata and tfmdata.shared then
- local otfdata = tfmdata.shared.otfdata
- local unicode = otfdata.luatex.unicodes[name]
- if type(unicode) == "number" then
- return unicode
- else
- return unicode[1]
- end
- end
- return nil
-end
-
-function otf.char(n) -- todo: afm en tfm
- if type(n) == "string" then
- n = otf.name_to_slot(n)
- end
- if n then
- tex.sprint(ctxcatcodes,format("\\char%s ",n))
- end
-end
-
end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-def'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -10191,8 +11311,10 @@ if not modules then modules = { } end modules ['font-def'] = {
local format, concat, gmatch, match, find, lower = string.format, table.concat, string.gmatch, string.match, string.find, string.lower
local tostring, next = tostring, next
+local lpegmatch = lpeg.match
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
+local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
@@ -10260,22 +11382,37 @@ and prepares a table that will move along as we proceed.</p>
local splitter, specifiers = nil, ""
+local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
+
+local left = P("(")
+local right = P(")")
+local colon = P(":")
+local space = P(" ")
+
+define.defaultlookup = "file"
+
+local prefixpattern = P(false)
+
function define.add_specifier(symbol)
specifiers = specifiers .. symbol
- local left = lpeg.P("(")
- local right = lpeg.P(")")
- local colon = lpeg.P(":")
- local method = lpeg.S(specifiers)
- local lookup = lpeg.C(lpeg.P("file")+lpeg.P("name")) * colon -- hard test, else problems with : method
- local sub = left * lpeg.C(lpeg.P(1-left-right-method)^1) * right
---~ local specification = lpeg.C(method) * lpeg.C(lpeg.P(1-method)^1)
- local specification = lpeg.C(method) * lpeg.C(lpeg.P(1)^1)
- local name = lpeg.C((1-sub-specification)^1)
- splitter = lpeg.P((lookup + lpeg.Cc("")) * name * (sub + lpeg.Cc("")) * (specification + lpeg.Cc("")))
+ local method = S(specifiers)
+ local lookup = C(prefixpattern) * colon
+ local sub = left * C(P(1-left-right-method)^1) * right
+ local specification = C(method) * C(P(1)^1)
+ local name = C((1-sub-specification)^1)
+ splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
+end
+
+function define.add_lookup(str,default)
+ prefixpattern = prefixpattern + P(str)
end
+define.add_lookup("file")
+define.add_lookup("name")
+define.add_lookup("spec")
+
function define.get_specification(str)
- return splitter:match(str)
+ return lpegmatch(splitter,str)
end
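+
+--~ a usage sketch (assuming ":" has been added as specifier, as in the colonized xetex-like mode):
+--~
+--~ local lookup, name, sub, method, detail =
+--~     define.get_specification("file:lmroman12-regular:+liga;extend=1.5")
+--~ -- lookup == "file", name == "lmroman12-regular", sub == "",
+--~ -- method == ":", detail == "+liga;extend=1.5"
+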
function define.register_split(symbol,action)
@@ -10294,8 +11431,8 @@ function define.makespecification(specification, lookup, name, sub, method, deta
--~ lookup = specification.lookup -- can come from xetex [] syntax
--~ specification.lookup = nil
--~ end
- if lookup ~= 'name' then -- for the moment only two lookups, maybe some day also system:
- lookup = 'file'
+ if not lookup or lookup == "" then
+ lookup = define.defaultlookup
end
local t = {
lookup = lookup, -- forced type
@@ -10315,7 +11452,7 @@ end
function define.analyze(specification, size)
-- can be optimized with locals
local lookup, name, sub, method, detail = define.get_specification(specification or "")
- return define.makespecification(specification,lookup, name, sub, method, detail, size)
+ return define.makespecification(specification, lookup, name, sub, method, detail, size)
end
--[[ldx--
@@ -10397,17 +11534,44 @@ end
<p>We can resolve the filename using the next function:</p>
--ldx]]--
+define.resolvers = resolvers
+
+function define.resolvers.file(specification)
+ specification.forced = file.extname(specification.name)
+ specification.name = file.removesuffix(specification.name)
+end
+
+function define.resolvers.name(specification)
+ local resolve = fonts.names.resolve
+ if resolve then
+ specification.resolved, specification.sub = fonts.names.resolve(specification.name,specification.sub)
+ if specification.resolved then
+ specification.forced = file.extname(specification.resolved)
+ specification.name = file.removesuffix(specification.resolved)
+ end
+ else
+ define.resolvers.file(specification)
+ end
+end
+
+function define.resolvers.spec(specification)
+ local resolvespec = fonts.names.resolvespec
+ if resolvespec then
+ specification.resolved, specification.sub = fonts.names.resolvespec(specification.name,specification.sub)
+ if specification.resolved then
+ specification.forced = file.extname(specification.resolved)
+ specification.name = file.removesuffix(specification.resolved)
+ end
+ else
+ define.resolvers.name(specification)
+ end
+end
+
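+--~ a minimal sketch of the dispatch (hypothetical specification table):
+--~
+--~ local spec = { lookup = "name", name = "Latin Modern Roman", sub = false }
+--~ define.resolvers[spec.lookup](spec) -- fills spec.resolved / spec.forced when a match is found
+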
function define.resolve(specification)
if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- if specification.lookup == 'name' then
- specification.resolved, specification.sub = fonts.names.resolve(specification.name,specification.sub)
- if specification.resolved then
- specification.forced = file.extname(specification.resolved)
- specification.name = file.removesuffix(specification.resolved)
- end
- elseif specification.lookup == 'file' then
- specification.forced = file.extname(specification.name)
- specification.name = file.removesuffix(specification.name)
+ local r = define.resolvers[specification.lookup]
+ if r then
+ r(specification)
end
end
if specification.forced == "" then
@@ -10415,7 +11579,6 @@ function define.resolve(specification)
else
specification.forced = specification.forced
end
---~ specification.hash = specification.name .. ' @ ' .. tfm.hash_features(specification)
specification.hash = lower(specification.name .. ' @ ' .. tfm.hash_features(specification))
if specification.sub and specification.sub ~= "" then
specification.hash = specification.sub .. ' @ ' .. specification.hash
@@ -10454,7 +11617,7 @@ function tfm.read(specification)
local reader = sequence[s]
if readers[reader] then -- not really needed
if trace_defining then
- logs.report("define font","trying (sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
+ logs.report("define font","trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
end
tfmtable = readers[reader](specification)
if tfmtable then
@@ -10466,7 +11629,9 @@ function tfm.read(specification)
end
end
if tfmtable then
- if tfmtable.filename and fonts.dontembed[tfmtable.filename] then
+ if directive_embedall then
+ tfmtable.embedding = "full"
+ elseif tfmtable.filename and fonts.dontembed[tfmtable.filename] then
tfmtable.embedding = "no"
else
tfmtable.embedding = "subset"
@@ -10589,16 +11754,22 @@ function readers.afm(specification,method)
return tfmtable
end
-local function check_otf(specification,suffix,what)
- local fullname, tfmtable = resolvers.findbinfile(specification.name,suffix) or "", nil
+-- maybe some day a set of names
+
+local function check_otf(forced,specification,suffix,what)
+ local name = specification.name
+ if forced then
+ name = file.addsuffix(name,suffix)
+ end
+ local fullname, tfmtable = resolvers.findbinfile(name,suffix) or "", nil -- one shot
if fullname == "" then
- local fb = fonts.names.old_to_new[specification.name]
+ local fb = fonts.names.old_to_new[name]
if fb then
fullname = resolvers.findbinfile(fb,suffix) or ""
end
end
if fullname == "" then
- local fb = fonts.names.new_to_old[specification.name]
+ local fb = fonts.names.new_to_old[name]
if fb then
fullname = resolvers.findbinfile(fb,suffix) or ""
end
@@ -10613,13 +11784,11 @@ end
function readers.opentype(specification,suffix,what)
local forced = specification.forced or ""
if forced == "otf" then
- return check_otf(specification,forced,"opentype")
- elseif forced == "ttf" then
- return check_otf(specification,forced,"truetype")
- elseif forced == "ttf" then
- return check_otf(specification,forced,"truetype")
+ return check_otf(true,specification,forced,"opentype")
+ elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then
+ return check_otf(true,specification,forced,"truetype")
else
- return check_otf(specification,suffix,what)
+ return check_otf(false,specification,suffix,what)
end
end
@@ -10635,14 +11804,14 @@ a helper function.</p>
function define.check(features,defaults) -- nb adapts features !
local done = false
- if table.is_empty(features) then
- features, done = table.fastcopy(defaults), true
- else
+ if features and next(features) then
for k,v in next, defaults do
if features[k] == nil then
features[k], done = v, true
end
end
+ else
+ features, done = table.fastcopy(defaults), true
end
return features, done -- done signals a change
end
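+
+--~ for instance (with hypothetical tables):
+--~
+--~ local features, done = define.check({ liga = true }, { mode = "node", liga = true, kern = true })
+--~ -- features now also carries mode and kern, and done signals that defaults were merged in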
@@ -10668,7 +11837,10 @@ function define.register(fontdata,id)
if trace_defining then
logs.report("define font","loading at 2 id %s, hash: %s",id or "?",hash or "?")
end
- fonts.ids[id] = fontdata
+ fonts.identifiers[id] = fontdata
+ fonts.characters [id] = fontdata.characters
+ fonts.quads [id] = fontdata.parameters.quad
+ -- todo: extra functions, e.g. setdigitwidth etc in list
tfm.internalized[hash] = id
end
end
@@ -10778,8 +11950,8 @@ end
<p>We overload both the <l n='tfm'/> and <l n='vf'/> readers.</p>
--ldx]]--
-callback.register('define_font' , define.read)
-callback.register('find_vf_file', vf.find ) -- not that relevant any more
+callbacks.register('define_font' , define.read, "definition of fonts (tfmtable preparation)")
+callbacks.register('find_vf_file', vf.find    , "locating virtual fonts, insofar as needed") -- not that relevant any more
end -- closure
@@ -10787,7 +11959,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['font-xtx'] = {
version = 1.001,
- comment = "companion to font-ini.tex",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -10796,6 +11968,7 @@ if not modules then modules = { } end modules ['font-xtx'] = {
local texsprint, count = tex.sprint, tex.count
local format, concat, gmatch, match, find, lower = string.format, table.concat, string.gmatch, string.match, string.find, string.lower
local tostring, next = tostring, next
+local lpegmatch = lpeg.match
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
@@ -10858,12 +12031,15 @@ local function istrue (s) list[s] = 'yes' end
local function isfalse(s) list[s] = 'no' end
local function iskey (k,v) list[k] = v end
+local function istrue (s) list[s] = true end
+local function isfalse(s) list[s] = false end
+
local spaces = lpeg.P(" ")^0
local namespec = (1-lpeg.S("/:("))^0 -- was: (1-lpeg.S("/: ("))^0
local crapspec = spaces * lpeg.P("/") * (((1-lpeg.P(":"))^0)/iscrap) * spaces
local filename = (lpeg.P("file:")/isfile * (namespec/thename)) + (lpeg.P("[") * lpeg.P(true)/isname * (((1-lpeg.P("]"))^0)/thename) * lpeg.P("]"))
local fontname = (lpeg.P("name:")/isname * (namespec/thename)) + lpeg.P(true)/issome * (namespec/thename)
-local sometext = (lpeg.R("az") + lpeg.R("AZ") + lpeg.R("09"))^1
+local sometext = (lpeg.R("az","AZ","09") + lpeg.S("+-."))^1
local truevalue = lpeg.P("+") * spaces * (sometext/istrue)
local falsevalue = lpeg.P("-") * spaces * (sometext/isfalse)
local keyvalue = (lpeg.C(sometext) * spaces * lpeg.P("=") * spaces * lpeg.C(sometext))/iskey
@@ -10873,15 +12049,17 @@ local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * sp
local options = lpeg.P(":") * spaces * (lpeg.P(";")^0 * option)^0
local pattern = (filename + fontname) * subvalue^0 * crapspec^0 * options^0
+local normalize_meanings = fonts.otf.meanings.normalize
+
function fonts.define.specify.colonized(specification) -- xetex mode
list = { }
- pattern:match(specification.specification)
- for k, v in next, list do
- list[k] = v:is_boolean()
- if type(list[a]) == "nil" then
- list[k] = v
- end
- end
+ lpegmatch(pattern,specification.specification)
+--~ for k, v in next, list do
+--~ list[k] = v:is_boolean()
+--~ if type(list[a]) == "nil" then
+--~ list[k] = v
+--~ end
+--~ end
list.crap = nil -- style not supported, maybe some day
if list.name then
specification.name = list.name
@@ -10895,7 +12073,8 @@ function fonts.define.specify.colonized(specification) -- xetex mode
specification.sub = list.sub
list.sub = nil
end
- specification.features.normal = list
+-- specification.features.normal = list
+ specification.features.normal = normalize_meanings(list)
return specification
end
@@ -10905,229 +12084,6 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-map'] = {
- version = 1.001,
- comment = "companion to font-ini.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local match, format, find, concat = string.match, string.format, string.find, table.concat
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local ctxcatcodes = tex.ctxcatcodes
-
---[[ldx--
-<p>Eventually this code will disappear because map files are kind
-of obsolete. Some code may move to runtime or auxiliary modules.</p>
-<p>The name to unciode related code will stay of course.</p>
---ldx]]--
-
-fonts = fonts or { }
-fonts.map = fonts.map or { }
-fonts.map.data = fonts.map.data or { }
-fonts.map.encodings = fonts.map.encodings or { }
-fonts.map.done = fonts.map.done or { }
-fonts.map.loaded = fonts.map.loaded or { }
-fonts.map.direct = fonts.map.direct or { }
-fonts.map.line = fonts.map.line or { }
-
-function fonts.map.line.pdfmapline(tag,str)
- return "\\loadmapline[" .. tag .. "][" .. str .. "]"
-end
-
-function fonts.map.line.pdftex(e) -- so far no combination of slant and stretch
- if e.name and e.fontfile then
- local fullname = e.fullname or ""
- if e.slant and e.slant ~= 0 then
- if e.encoding then
- return fonts.map.line.pdfmapline("=",format('%s %s "%g SlantFont" <%s <%s',e.name,fullname,e.slant,e.encoding,e.fontfile))
- else
- return fonts.map.line.pdfmapline("=",format('%s %s "%g SlantFont" <%s',e.name,fullname,e.slant,e.fontfile))
- end
- elseif e.stretch and e.stretch ~= 1 and e.stretch ~= 0 then
- if e.encoding then
- return fonts.map.line.pdfmapline("=",format('%s %s "%g ExtendFont" <%s <%s',e.name,fullname,e.stretch,e.encoding,e.fontfile))
- else
- return fonts.map.line.pdfmapline("=",format('%s %s "%g ExtendFont" <%s',e.name,fullname,e.stretch,e.fontfile))
- end
- else
- if e.encoding then
- return fonts.map.line.pdfmapline("=",format('%s %s <%s <%s',e.name,fullname,e.encoding,e.fontfile))
- else
- return fonts.map.line.pdfmapline("=",format('%s %s <%s',e.name,fullname,e.fontfile))
- end
- end
- else
- return nil
- end
-end
-
-function fonts.map.flush(backend) -- will also erase the accumulated data
- local flushline = fonts.map.line[backend or "pdftex"] or fonts.map.line.pdftex
- for _, e in pairs(fonts.map.data) do
- tex.sprint(ctxcatcodes,flushline(e))
- end
- fonts.map.data = { }
-end
-
-fonts.map.line.dvips = fonts.map.line.pdftex
-fonts.map.line.dvipdfmx = function() end
-
-function fonts.map.convert_entries(filename)
- if not fonts.map.loaded[filename] then
- fonts.map.data, fonts.map.encodings = fonts.map.load_file(filename,fonts.map.data, fonts.map.encodings)
- fonts.map.loaded[filename] = true
- end
-end
-
-function fonts.map.load_file(filename, entries, encodings)
- entries = entries or { }
- encodings = encodings or { }
- local f = io.open(filename)
- if f then
- local data = f:read("*a")
- if data then
- for line in gmatch(data,"(.-)[\n\t]") do
- if find(line,"^[%#%%%s]") then
- -- print(line)
- else
- local stretch, slant, name, fullname, fontfile, encoding
- line = line:gsub('"(.+)"', function(s)
- stretch = find(s,'"([^"]+) ExtendFont"')
- slant = find(s,'"([^"]+) SlantFont"')
- return ""
- end)
- if not name then
- -- name fullname encoding fontfile
- name, fullname, encoding, fontfile = match(line,"^(%S+)%s+(%S*)[%s<]+(%S*)[%s<]+(%S*)%s*$")
- end
- if not name then
- -- name fullname (flag) fontfile encoding
- name, fullname, fontfile, encoding = match(line,"^(%S+)%s+(%S*)[%d%s<]+(%S*)[%s<]+(%S*)%s*$")
- end
- if not name then
- -- name fontfile
- name, fontfile = match(line,"^(%S+)%s+[%d%s<]+(%S*)%s*$")
- end
- if name then
- if encoding == "" then encoding = nil end
- entries[name] = {
- name = name, -- handy
- fullname = fullname,
- encoding = encoding,
- fontfile = fontfile,
- slant = tonumber(slant),
- stretch = tonumber(stretch)
- }
- encodings[name] = encoding
- elseif line ~= "" then
- -- print(line)
- end
- end
- end
- end
- f:close()
- end
- return entries, encodings
-end
-
-function fonts.map.load_lum_table(filename)
- local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.find_file(lumname,"map") or ""
- if lumfile ~= "" and lfs.isfile(lumfile) then
- if trace_loading or trace_unimapping then
- logs.report("load otf","enhance: loading %s ",lumfile)
- end
- lumunic = dofile(lumfile)
- return lumunic, lumfile
- end
-end
-
-local hex = lpeg.R("AF","09")
-local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
-local dec = (lpeg.R("09")^1) / tonumber
-local period = lpeg.P(".")
-
-local unicode = lpeg.P("uni") * (hexfour * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexfour^1) * lpeg.Cc(true))
-local index = lpeg.P("index") * dec * lpeg.Cc(false)
-
-local parser = unicode + index
-
-local parsers = { }
-
-function fonts.map.make_name_parser(str)
- if not str or str == "" then
- return parser
- else
- local p = parsers[str]
- if not p then
- p = lpeg.P(str) * period * dec * lpeg.Cc(false)
- parsers[str] = p
- end
- return p
- end
-end
-
---~ local parser = fonts.map.make_name_parser("Japan1")
---~ local function test(str)
---~ local b, a = parser:match(str)
---~ print((a and table.serialize(b)) or b)
---~ end
---~ test("uni1234")
---~ test("uni1234.xx")
---~ test("uni12349876")
---~ test("index1234")
---~ test("Japan1.123")
-
-function fonts.map.tounicode16(unicode)
- if unicode < 0x10000 then
- return format("%04X",unicode)
- else
- return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
-end
-
-function fonts.map.tounicode16sequence(unicodes)
- local t = { }
- for l=1,#unicodes do
- local unicode = unicodes[l]
- if unicode < 0x10000 then
- t[l] = format("%04X",unicode)
- else
- t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
- end
- return concat(t)
-end
-
---~ This is quite a bit faster but at the cost of some memory but if we
---~ do this we will also use it elsewhere so let's not follow this route
---~ now. I might use this method in the plain variant (no caching there)
---~ but then I need a flag that distinguishes between code branches.
---~
---~ local cache = { }
---~
---~ function fonts.map.tounicode16(unicode)
---~ local s = cache[unicode]
---~ if not s then
---~ if unicode < 0x10000 then
---~ s = format("%04X",unicode)
---~ else
---~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
---~ end
---~ cache[unicode] = s
---~ end
---~ return s
---~ end
-
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
if not modules then modules = { } end modules ['font-dum'] = {
version = 1.001,
comment = "companion to luatex-*.tex",
@@ -11140,8 +12096,9 @@ fonts = fonts or { }
-- general
-fonts.otf.pack = false
-fonts.tfm.resolve_vf = false -- no sure about this
+fonts.otf.pack = false -- only makes sense in context
+fonts.tfm.resolve_vf    = false -- not sure about this
+fonts.tfm.fontname_mode = "specification" -- somehow latex needs this
-- readers
@@ -11169,52 +12126,56 @@ function fonts.logger.save()
end
-- names
+--
+-- Watch out: the version number must match the one used by the
+-- mtx-fonts.lua function scripts.fonts.names, as the plain solution
+-- uses a simplified font database; because that number differs from
+-- the context one, we're less dependent on context.
fonts.names = fonts.names or { }
+fonts.names.version = 1.001 -- not the same as in context
fonts.names.basename = "luatex-fonts-names.lua"
fonts.names.new_to_old = { }
fonts.names.old_to_new = { }
local data, loaded = nil, false
+local fileformats = { "lua", "tex", "other text files" }
+
function fonts.names.resolve(name,sub)
if not loaded then
local basename = fonts.names.basename
if basename and basename ~= "" then
- for _, format in ipairs { "lua", "tex", "other text files" } do
+ for i=1,#fileformats do
+ local format = fileformats[i]
local foundname = resolvers.find_file(basename,format) or ""
if foundname ~= "" then
data = dofile(foundname)
- if data then
- local d = { }
- for k, v in pairs(data.mapping) do
- local t = v[1]
- if t == "ttf" or t == "otf" or t == "ttc" or t == "dfont" then
- d[k] = v
- end
- end
- data.mapping = d
- end
break
end
end
end
loaded = true
end
- if type(data) == "table" and data.version == 1.08 then
+ if type(data) == "table" and data.version == fonts.names.version then
local condensed = string.gsub(string.lower(name),"[^%a%d]","")
- local found = data.mapping and data.mapping[condensed]
+ local found = data.mappings and data.mappings[condensed]
if found then
- local filename, is_sub = found[3], found[4]
- if is_sub then is_sub = found[2] end
- return filename, is_sub
+ local fontname, filename, subfont = found[1], found[2], found[3]
+ if subfont then
+ return filename, fontname
+ else
+ return filename, false
+ end
else
return name, false -- fallback to filename
end
end
end
+fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
+
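+--~ a possible use, assuming the simplified database luatex-fonts-names.lua (generated by
+--~ mtx-fonts) is present:
+--~
+--~ local filename, sub = fonts.names.resolve("dejavuserif")
+--~ -- returns the font file name when found, otherwise the given name itself as a fallback
+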
-- For the moment we put this (adapted) pseudo feature here.
table.insert(fonts.triggers,"itlc")
@@ -11242,7 +12203,187 @@ end
fonts.initializers.base.otf.itlc = itlc
fonts.initializers.node.otf.itlc = itlc
+-- slant and extend
+
+function fonts.initializers.common.slant(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 1 then
+ value = 1
+ elseif value < -1 then
+ value = -1
+ end
+ tfmdata.slant_factor = value
+end
+
+function fonts.initializers.common.extend(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 10 then
+ value = 10
+ elseif value < -10 then
+ value = -10
+ end
+ tfmdata.extend_factor = value
+end
+
+table.insert(fonts.triggers,"slant")
+table.insert(fonts.triggers,"extend")
+
+fonts.initializers.base.otf.slant = fonts.initializers.common.slant
+fonts.initializers.node.otf.slant = fonts.initializers.common.slant
+fonts.initializers.base.otf.extend = fonts.initializers.common.extend
+fonts.initializers.node.otf.extend = fonts.initializers.common.extend
+
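+--~ requested at definition time, for instance (compare luatex-test.tex):
+--~
+--~ \font\test=file:lmroman12-regular:slant=0.8;extend=1.5 at 12pt
+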
+-- expansion and protrusion
+
+fonts.protrusions = fonts.protrusions or { }
+fonts.protrusions.setups = fonts.protrusions.setups or { }
+
+local setups = fonts.protrusions.setups
+
+function fonts.initializers.common.protrusion(tfmdata,value)
+ if value then
+ local setup = setups[value]
+ if setup then
+ local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
+ local emwidth = tfmdata.parameters.quad
+ tfmdata.auto_protrude = true
+ for i, chr in next, tfmdata.characters do
+ local v, pl, pr = setup[i], nil, nil
+ if v then
+ pl, pr = v[1], v[2]
+ end
+ if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
+ if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
+ end
+ end
+ end
+end
+
+fonts.expansions = fonts.expansions or { }
+fonts.expansions.setups = fonts.expansions.setups or { }
+
+local setups = fonts.expansions.setups
+
+function fonts.initializers.common.expansion(tfmdata,value)
+ if value then
+ local setup = setups[value]
+ if setup then
+ local stretch, shrink, step, factor = setup.stretch or 0, setup.shrink or 0, setup.step or 0, setup.factor or 1
+ tfmdata.stretch, tfmdata.shrink, tfmdata.step, tfmdata.auto_expand = stretch * 10, shrink * 10, step * 10, true
+ for i, chr in next, tfmdata.characters do
+ local v = setup[i]
+ if v and v ~= 0 then
+ chr.expansion_factor = v*factor
+ else -- can be option
+ chr.expansion_factor = factor
+ end
+ end
+ end
+ end
+end
+
+table.insert(fonts.manipulators,"protrusion")
+table.insert(fonts.manipulators,"expansion")
+
+fonts.initializers.base.otf.protrusion = fonts.initializers.common.protrusion
+fonts.initializers.node.otf.protrusion = fonts.initializers.common.protrusion
+fonts.initializers.base.otf.expansion = fonts.initializers.common.expansion
+fonts.initializers.node.otf.expansion = fonts.initializers.common.expansion
+
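+--~ these only become visible when the engine parameters are set too, for instance:
+--~
+--~ \pdfprotrudechars2 \pdfadjustspacing2
+--~ \font\test=file:lmroman12-regular:protrusion=default;expansion=default at 12pt
+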
+-- left over
+
function fonts.register_message()
end
+-- example vectors
+
+local byte = string.byte
+
+fonts.expansions.setups['default'] = {
+
+ stretch = 2, shrink = 2, step = .5, factor = 1,
+
+ [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
+ [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
+ [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
+ [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
+ [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
+ [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
+ [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
+ [byte('w')] = 0.7, [byte('z')] = 0.7,
+ [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
+}
+
+fonts.protrusions.setups['default'] = {
+
+ factor = 1, left = 1, right = 1,
+
+ [0x002C] = { 0, 1 }, -- comma
+ [0x002E] = { 0, 1 }, -- period
+ [0x003A] = { 0, 1 }, -- colon
+ [0x003B] = { 0, 1 }, -- semicolon
+ [0x002D] = { 0, 1 }, -- hyphen
+ [0x2013] = { 0, 0.50 }, -- endash
+ [0x2014] = { 0, 0.33 }, -- emdash
+ [0x3001] = { 0, 1 }, -- ideographic comma 、
+ [0x3002] = { 0, 1 }, -- ideographic full stop 。
+ [0x060C] = { 0, 1 }, -- arabic comma ،
+ [0x061B] = { 0, 1 }, -- arabic semicolon ؛
+ [0x06D4] = { 0, 1 }, -- arabic full stop ۔
+
+}
+
+-- normalizer
+
+fonts.otf.meanings = fonts.otf.meanings or { }
+
+fonts.otf.meanings.normalize = fonts.otf.meanings.normalize or function(t)
+    if t.rand then
+        t.rand = "random"
+    end
+    return t -- callers (e.g. font-xtx) use the return value
+end
+
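+--~ this fallback only turns a bare rand=true request into rand="random"; it is used
+--~ when no real normalizer has been loaded yet
+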
+-- bonus
+
+function fonts.otf.name_to_slot(name)
+ local tfmdata = fonts.ids[font.current()]
+ if tfmdata and tfmdata.shared then
+ local otfdata = tfmdata.shared.otfdata
+ local unicode = otfdata.luatex.unicodes[name]
+ return unicode and (type(unicode) == "number" and unicode or unicode[1])
+ end
+end
+
+function fonts.otf.char(n)
+ if type(n) == "string" then
+ n = fonts.otf.name_to_slot(n)
+ end
+ if type(n) == "number" then
+ tex.sprint("\\char" .. n)
+ end
+end
+
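+--~ for example (assuming the current font has a glyph with that name):
+--~
+--~ fonts.otf.char("sterling") -- by glyph name
+--~ fonts.otf.char(0x00A3)     -- by unicode
+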
+-- another one:
+
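+-- (soft hyphen, zero width spaces and joiners, bidi controls, invisible operators,
+-- musical formatting and tag characters, among others)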
+fonts.strippables = table.tohash {
+ 0x000AD, 0x017B4, 0x017B5, 0x0200B, 0x0200C, 0x0200D, 0x0200E, 0x0200F, 0x0202A, 0x0202B,
+ 0x0202C, 0x0202D, 0x0202E, 0x02060, 0x02061, 0x02062, 0x02063, 0x0206A, 0x0206B, 0x0206C,
+ 0x0206D, 0x0206E, 0x0206F, 0x0FEFF, 0x1D173, 0x1D174, 0x1D175, 0x1D176, 0x1D177, 0x1D178,
+ 0x1D179, 0x1D17A, 0xE0001, 0xE0020, 0xE0021, 0xE0022, 0xE0023, 0xE0024, 0xE0025, 0xE0026,
+ 0xE0027, 0xE0028, 0xE0029, 0xE002A, 0xE002B, 0xE002C, 0xE002D, 0xE002E, 0xE002F, 0xE0030,
+ 0xE0031, 0xE0032, 0xE0033, 0xE0034, 0xE0035, 0xE0036, 0xE0037, 0xE0038, 0xE0039, 0xE003A,
+ 0xE003B, 0xE003C, 0xE003D, 0xE003E, 0xE003F, 0xE0040, 0xE0041, 0xE0042, 0xE0043, 0xE0044,
+ 0xE0045, 0xE0046, 0xE0047, 0xE0048, 0xE0049, 0xE004A, 0xE004B, 0xE004C, 0xE004D, 0xE004E,
+ 0xE004F, 0xE0050, 0xE0051, 0xE0052, 0xE0053, 0xE0054, 0xE0055, 0xE0056, 0xE0057, 0xE0058,
+ 0xE0059, 0xE005A, 0xE005B, 0xE005C, 0xE005D, 0xE005E, 0xE005F, 0xE0060, 0xE0061, 0xE0062,
+ 0xE0063, 0xE0064, 0xE0065, 0xE0066, 0xE0067, 0xE0068, 0xE0069, 0xE006A, 0xE006B, 0xE006C,
+ 0xE006D, 0xE006E, 0xE006F, 0xE0070, 0xE0071, 0xE0072, 0xE0073, 0xE0074, 0xE0075, 0xE0076,
+ 0xE0077, 0xE0078, 0xE0079, 0xE007A, 0xE007B, 0xE007C, 0xE007D, 0xE007E, 0xE007F,
+}
+
+
end -- closure
diff --git a/Master/texmf-dist/tex/generic/context/luatex-fonts.lua b/Master/texmf-dist/tex/generic/context/luatex-fonts.lua
index 56768138be9..84acb2b18c6 100644
--- a/Master/texmf-dist/tex/generic/context/luatex-fonts.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex-fonts.lua
@@ -111,6 +111,7 @@ else
loadmodule('font-tfm.lua') -- will be split (we may need font-log)
loadmodule('font-cid.lua')
loadmodule('font-ott.lua') -- might be split
+ loadmodule('font-map.lua') -- for loading lum file (will be stripped)
loadmodule('font-otf.lua')
loadmodule('font-otd.lua')
loadmodule('font-oti.lua')
@@ -120,7 +121,6 @@ else
loadmodule('font-otc.lua')
loadmodule('font-def.lua')
loadmodule('font-xtx.lua')
- loadmodule('font-map.lua') -- for loading lum file (will be stripped)
loadmodule('font-dum.lua')
end
diff --git a/Master/texmf-dist/tex/generic/context/luatex-mplib.lua b/Master/texmf-dist/tex/generic/context/luatex-mplib.lua
index 6f9bdc7ef41..0afad083ac2 100644
--- a/Master/texmf-dist/tex/generic/context/luatex-mplib.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex-mplib.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['supp-mpl'] = {
version = 1.001,
- comment = "companion to supp-mpl.tex",
+ comment = "companion to luatex-mplib.tex",
author = "Hans Hagen & Taco Hoekwater",
copyright = "ConTeXt Development Team",
license = "public domain",
@@ -22,7 +22,7 @@ if metapost and metapost.version then
else
- local format, concat, abs = string.format, table.concat, math.abs
+ local format, concat, abs, match = string.format, table.concat, math.abs, string.match
local mplib = require ('mplib')
local kpse = require ('kpse')
@@ -320,7 +320,7 @@ else
metapost.report("flushing figure %s",f)
local figure = figures[f]
local objects = getobjects(result,figure,f)
- local fignum = tonumber((figure:filename()):match("([%d]+)$") or figure:charcode() or 0)
+ local fignum = tonumber(match(figure:filename(),"([%d]+)$") or figure:charcode() or 0)
local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
local bbox = figure:boundingbox()
local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] -- faster than unpack
diff --git a/Master/texmf-dist/tex/generic/context/luatex-test.tex b/Master/texmf-dist/tex/generic/context/luatex-test.tex
index 1c423ec5b2b..a142d163534 100644
--- a/Master/texmf-dist/tex/generic/context/luatex-test.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex-test.tex
@@ -32,6 +32,12 @@
% \testy این یک متن نمونه است با قلم ذر که درست آمده است.
% \font\testz=name:linlibertineo \testz
+\pdfprotrudechars2 \pdfadjustspacing2
+
+\font\testb=file:lmroman12-regular:+liga;extend=1.5 at 12pt \testb \input tufte \par
+\font\testb=file:lmroman12-regular:+liga;slant=0.8 at 12pt \testb \input tufte \par
+\font\testb=file:lmroman12-regular:+liga;protrusion=default at 12pt \testb \input tufte \par
+
\setmplibformat{plain}
\mplibcode
diff --git a/Master/texmf-dist/tex/generic/context/m-ch-en.tex b/Master/texmf-dist/tex/generic/context/m-ch-en.tex
index 8c55a5669c4..6bd4353199b 100644
--- a/Master/texmf-dist/tex/generic/context/m-ch-en.tex
+++ b/Master/texmf-dist/tex/generic/context/m-ch-en.tex
@@ -1,6 +1,6 @@
-% name : PPCHTEX / english interface
+% name : PPCHTEX / english interface
% version : 1997.03.05
-% author : J. Hagen
+% author : J. Hagen
% copyright : J. Hagen, A.F. Otten
\chardef\interfacenumber=0