path: root/Master/texmf-dist/tex/context/base
author     Karl Berry <karl@freefriends.org>    2016-05-08 22:44:34 +0000
committer  Karl Berry <karl@freefriends.org>    2016-05-08 22:44:34 +0000
commit     fa5d18c2f98ae3bca364f3c19804bd42f8305512 (patch)
tree       46de646df6b2cff7ab3793e4806f0e5b5a5aa741 /Master/texmf-dist/tex/context/base
parent     43354cc71f70a0e237312e593234b6a4efab3eb2 (diff)
context
git-svn-id: svn://tug.org/texlive/trunk@40962 c570f23f-e606-0410-a88d-b1316a301751
Diffstat (limited to 'Master/texmf-dist/tex/context/base')
-rw-r--r--  Master/texmf-dist/tex/context/base/context-version.pdf     | bin 4249 -> 4253 bytes
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/back-pdf.lua       |   11
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/char-ini.lua       |   12
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv      |    2
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/context-todo.tex   |    9
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/context.mkiv       |    2
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/core-con.lua       |   43
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/core-con.mkiv      |    4
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/core-ctx.lua       |   24
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-chk.lua       |   13
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-enh.lua       |   16
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-ext.lua       |   89
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-fbk.lua       |   12
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-gbn.lua       |   20
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-gds.lua       |   34
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi      |    9
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-one.lua       | 1220
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-otc.lua       |   27
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-otj.lua       |   30
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-otl.lua       |    8
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-oto.lua       |    2
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/font-ots.lua       |    3
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/lang-def.mkiv      |    2
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/lang-ini.lua       |    8
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/lang-ini.mkiv      |    2
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/lpdf-ini.lua       |   21
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/lpdf-xmp.lua       |   88
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/luat-cnf.lua       |    1
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/m-oldotf.mkiv      |    1
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/meta-ini.mkiv      |    4
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/mult-sys.mkiv      |    2
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/node-fnt.lua       |   15
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/status-files.pdf   | bin 9119 -> 9257 bytes
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf     | bin 268089 -> 268124 bytes
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/typo-tal.lua       |    8
-rw-r--r--  Master/texmf-dist/tex/context/base/mkiv/typo-wrp.mkiv      |    1
36 files changed, 1534 insertions, 209 deletions
diff --git a/Master/texmf-dist/tex/context/base/context-version.pdf b/Master/texmf-dist/tex/context/base/context-version.pdf
index a1a04de92c1..fbb5c71c981 100644
--- a/Master/texmf-dist/tex/context/base/context-version.pdf
+++ b/Master/texmf-dist/tex/context/base/context-version.pdf
Binary files differ
diff --git a/Master/texmf-dist/tex/context/base/mkiv/back-pdf.lua b/Master/texmf-dist/tex/context/base/mkiv/back-pdf.lua
index 0821abb2895..323f1d57fcf 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/back-pdf.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/back-pdf.lua
@@ -25,6 +25,8 @@ local scankeyword = scanners.keyword
local scanners = interfaces.scanners
local implement = interfaces.implement
+local report = logs.reporter("backend")
+
local outputfilename
function codeinjections.getoutputfilename()
@@ -145,6 +147,15 @@ scanners.pdfstartmirroring = function()
context(pdfsetmatrix(-1,0,0,1))
end
+if environment.arguments.nocompression then
+ pdf.setcompresslevel(0)
+ pdf.setobjcompresslevel(0)
+ function pdf.setcompresslevel()
+ -- blocked from now on
+ end
+ pdf.setobjcompresslevel = pdf.setcompresslevel
+end
+
scanners.pdfstopmirroring = scanners.pdfstartmirroring
-- todo, change the above to implement too --
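The hunk above adds an uncompressed-PDF mode to the backend: when the run asks for no compression, both the stream and the object-stream compression level are set to 0 and the setters are then neutralized so later code cannot re-enable them. A minimal standalone sketch of that blocking pattern (the helper name is made up for the sketch; only the pdf.* setters come from the hunk itself):

    -- sketch only: assumes a LuaTeX run where pdf.setcompresslevel and
    -- pdf.setobjcompresslevel are available
    local function disablecompression()
        pdf.setcompresslevel(0)     -- uncompressed content streams
        pdf.setobjcompresslevel(0)  -- no object streams
        function pdf.setcompresslevel() end            -- block later changes
        pdf.setobjcompresslevel = pdf.setcompresslevel
    end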
diff --git a/Master/texmf-dist/tex/context/base/mkiv/char-ini.lua b/Master/texmf-dist/tex/context/base/mkiv/char-ini.lua
index ad53cae8f34..63328a177a3 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/char-ini.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/char-ini.lua
@@ -1083,8 +1083,8 @@ if not characters.lhash then
lhash[utfchar(k)] = utfchar(l)
elseif #l == 2 then
lhash[utfchar(k)] = utfchar(l[1]) .. utfchar(l[2])
- else
- inspect(v)
+ -- else
+ -- inspect(v)
end
else
local u = v.uccode
@@ -1094,8 +1094,8 @@ if not characters.lhash then
uhash[utfchar(k)] = utfchar(u)
elseif #u == 2 then
uhash[utfchar(k)] = utfchar(u[1]) .. utfchar(u[2])
- else
- inspect(v)
+ -- else
+ -- inspect(v)
end
end
end
@@ -1105,8 +1105,8 @@ if not characters.lhash then
shash[utfchar(k)] = utfchar(s)
elseif #s == 2 then
shash[utfchar(k)] = utfchar(s[1]) .. utfchar(s[2])
- else
- inspect(v)
+ -- else
+ -- inspect(v)
end
end
-- end
diff --git a/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv b/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv
index 3e5ff1ffc1f..1b0ec7432ea 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2016.05.01 09:52}
+\newcontextversion{2016.05.08 17:30}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex b/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex
index 0674ad9fbda..66889c4d37c 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex
+++ b/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex
@@ -18,6 +18,13 @@
\startitem
add \type {--output-filename} for \PDF\ filename
\stopitem
+ \startitem
+ more consistent \type {lang_variables} and \type {tex_language} in \type
+ {texlang.w} and also store the \type {*mins}
+ \stopitem
+ \startitem
+ get rid of \type {temp} node in hyphenator i.e. postpone to when needed
+ \stopitem
\stopitemize
\subsubject{\CONTEXT}
@@ -33,7 +40,7 @@
play with box attributes
\stopitem
\startitem
- check consistency between foonotes and running text (main color,
+ check consistency between footnotes and running text (main color,
styles, properties)
\stopitem
\startitem
diff --git a/Master/texmf-dist/tex/context/base/mkiv/context.mkiv b/Master/texmf-dist/tex/context/base/mkiv/context.mkiv
index 7ec285ed137..c66b2856a33 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/context.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2016.05.01 09:52}
+\edef\contextversion{2016.05.08 17:30}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/Master/texmf-dist/tex/context/base/mkiv/core-con.lua b/Master/texmf-dist/tex/context/base/mkiv/core-con.lua
index bfe5357a6ec..6913ac569a2 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/core-con.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/core-con.lua
@@ -19,8 +19,9 @@ slower but look nicer this way.</p>
local floor, date, time, concat = math.floor, os.date, os.time, table.concat
local lower, upper, rep, match, gsub = string.lower, string.upper, string.rep, string.match, string.gsub
local utfchar, utfbyte = utf.char, utf.byte
-local tonumber, tostring = tonumber, tostring
-local P, C, Cs, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.match
+local tonumber, tostring, type, rawset = tonumber, tostring, type, rawset
+local P, S, R, Cc, Cf, Cg, Ct, Cs, C = lpeg.P, lpeg.S, lpeg.R, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Ct, lpeg.Cs, lpeg.C
+local lpegmatch = lpeg.match
local context = context
local commands = commands
@@ -33,6 +34,8 @@ local formatters = string.formatters
local variables = interfaces.variables
local constants = interfaces.constants
+local texset = tex.set
+
converters = converters or { }
local converters = converters
@@ -1355,3 +1358,39 @@ implement {
actions = { formatters["U+%05X"], context },
arguments = "integer"
}
+
+local n = lpeg.R("09")^1 / tonumber
+
+local p = Cf( Ct("")
+ * Cg(Cc("year") * (n )) * P("-")^-1
+ * Cg(Cc("month") * (n + Cc( 1))) * P("-")^-1
+ * Cg(Cc("day") * (n + Cc( 1))) * lpeg.patterns.whitespace^-1
+ * Cg(Cc("hour") * (n + Cc( 0))) * P(":")^-1
+ * Cg(Cc("min") * (n + Cc( 0)))
+ , rawset)
+
+function converters.totime(s)
+ if not s then
+ return
+ elseif type(s) == "table" then
+ return s
+ elseif type(s) == "string" then
+ return lpegmatch(p,s)
+ end
+ local n = tonumber(s)
+ if n and n >= 0 then
+ return date("*t",n)
+ end
+end
+
+function converters.settime(t)
+ if type(t) ~= "table" then
+ t = converters.totime(t)
+ end
+ if t then
+ texset("year", t.year or 1000)
+ texset("month", t.month or 1)
+ texset("day", t.day or 1)
+ texset("time", (t.hour or 0) * 60 + (t.min or 0))
+ end
+end
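The new converters.totime/settime pair accepts a table, a "YYYY-MM-DD HH:MM" style string (parsed by the lpeg pattern above, with month/day defaulting to 1 and hour/min to 0), or a numeric timestamp (routed through os.date("*t",n)); settime then pushes the result into \year, \month, \day and \time. A rough usage sketch, assuming a MkIV Lua chunk where the global converters table is available:

    local t = converters.totime("2016-05-08 17:30")
    -- t == { year = 2016, month = 5, day = 8, hour = 17, min = 30 }
    converters.settime(t)         -- sets \year, \month, \day and \time (minutes since midnight)
    converters.settime(os.time()) -- a numeric timestamp also works, via os.date("*t",n)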
diff --git a/Master/texmf-dist/tex/context/base/mkiv/core-con.mkiv b/Master/texmf-dist/tex/context/base/mkiv/core-con.mkiv
index 1f6dc54d5e6..8565a30965b 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/core-con.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/core-con.mkiv
@@ -250,8 +250,8 @@
\newcount\normalweekday
- \def\dayoftheweek #1#2#3{\clf_weekdayname\numexpr#1\relax\numexpr#2\relax,\numexpr#3\relax} % name
-\unexpanded\def\getdayoftheweek#1#2#3{\normalweekday\clf_weekday\numexpr#1\relax\numexpr#2\relax,\numexpr#3\relax\relax} % number
+ \def\dayoftheweek #1#2#3{\clf_weekdayname\numexpr#1\relax\numexpr#2\relax\numexpr#3\relax} % name
+\unexpanded\def\getdayoftheweek#1#2#3{\normalweekday\clf_weekday\numexpr#1\relax\numexpr#2\relax\numexpr#3\relax\relax} % number
%D Using this macro in
%D
diff --git a/Master/texmf-dist/tex/context/base/mkiv/core-ctx.lua b/Master/texmf-dist/tex/context/base/mkiv/core-ctx.lua
index c20691cd7be..1f22402e65b 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/core-ctx.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/core-ctx.lua
@@ -37,6 +37,7 @@ local gsub, find, match, validstring = string.gsub, string.find, string.match, s
local concat = table.concat
local xmltext = xml.text
+local report_jobfile = logs.reporter("system","jobfile")
local report_prepfiles = logs.reporter("system","prepfiles")
local commands = commands
@@ -94,6 +95,8 @@ end
function ctxrunner.load(ctxname)
+ report_jobfile("processing %a",ctxname)
+
local xmldata = xml.load(ctxname)
local jobname = tex.jobname -- todo
@@ -138,7 +141,7 @@ function ctxrunner.load(ctxname)
end
for e in xml.collected(xmldata,"ctx:message") do
- report_prepfiles("ctx comment: %s", xmltext(e))
+ report_jobfile("ctx comment: %s", xmltext(e))
end
for r, d, k in xml.elements(xmldata,"ctx:value[@name='job']") do
@@ -192,17 +195,21 @@ function ctxrunner.load(ctxname)
pattern =justtext(xml.tostring(pattern))
if preprocessor and preprocessor ~= "" and pattern and pattern ~= "" then
local noftreatments = #treatments + 1
- local findpattern = string.topattern(pattern)
+ local findpattern = string.topattern(pattern)
local preprocessors = utilities.parsers.settings_to_array(preprocessor)
treatments[noftreatments] = {
pattern = findpattern,
preprocessors = preprocessors,
}
- report_prepfiles("step %s, pattern %a, preprocessor: %a",noftreatments,findpattern,preprocessors)
+ report_jobfile("step %s, pattern %a, preprocessor: %a",noftreatments,findpattern,preprocessors)
end
end
end
+ if #treatments == 0 then
+ report_jobfile("no treatments needed")
+ end
+
local function needstreatment(oldfile)
for i=1,#treatments do
local treatment = treatments[i]
@@ -215,6 +222,8 @@ function ctxrunner.load(ctxname)
local preparefile = #treatments > 0 and function(prepfiles,filename)
+ filename = file.collapsepath(filename)
+
local treatment = needstreatment(filename)
local oldfile = filename
local newfile = false
@@ -244,9 +253,12 @@ function ctxrunner.load(ctxname)
end
end
end
+ oldname = file.collapsepath(oldname)
+ newname = file.collapsepath(newname)
if not newfile then
newfile = filename
- elseif file.needsupdating(filename,newfile) then
+ report_prepfiles("%a is not converted to %a",filename,newfile)
+ elseif not lfs.isfile(newfile) or file.needsupdating(filename,newfile) then
for i=1,#runners do
report_prepfiles("step %i: %s",i,runners[i])
end
@@ -258,6 +270,8 @@ function ctxrunner.load(ctxname)
-- if result > 0 then
-- report_prepfiles("error, return code: %s",result)
-- end
+ logs.newline()
+ logs.newline()
end
if lfs.isfile(newfile) then
file.syncmtimes(filename,newfile)
@@ -268,6 +282,8 @@ function ctxrunner.load(ctxname)
end
elseif lfs.isfile(newfile) then
report_prepfiles("%a is already converted to %a",filename,newfile)
+ else
+ report_prepfiles("unknown error when converting %a to %a",filename,newfile)
end
else
newfile = filename
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-chk.lua b/Master/texmf-dist/tex/context/base/mkiv/font-chk.lua
index 5b1ad992070..15291052fe3 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-chk.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-chk.lua
@@ -437,7 +437,7 @@ local function adddummysymbols(tfmdata,...)
-- end
end
-registerotffeature {
+local dummies_specification = {
name = "dummies",
description = "dummy symbols",
default = true,
@@ -447,15 +447,8 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "dummies",
- description = "dummy symbols",
- default = true,
- manipulators = {
- base = adddummysymbols,
- node = adddummysymbols,
- }
-}
+registerotffeature(dummies_specification)
+registerafmfeature(dummies_specification)
-- callback.register("char_exists",function(f,c) -- to slow anyway as called often so we should flag in tfmdata
-- return true
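This hunk, and the font-enh, font-ext, font-fbk and font-gds hunks below, all apply the same refactoring: the feature specification is built once as a local table and registered for both the otf and the afm handler instead of being written out twice. A sketch of the pattern with a hypothetical feature name and initializer (neither comes from this commit):

    local demo_specification = {
        name         = "demo",         -- hypothetical feature name
        description  = "demo feature",
        initializers = {
            base = initializedemo,     -- assumed to exist for the sketch
            node = initializedemo,
        },
    }
    registerotffeature(demo_specification)
    registerafmfeature(demo_specification)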
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-enh.lua b/Master/texmf-dist/tex/context/base/mkiv/font-enh.lua
index 3439a434ab4..f3209f5ee10 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-enh.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-enh.lua
@@ -173,7 +173,7 @@ local function initializeunicoding(tfmdata)
end
end
-registerafmfeature {
+local unicoding_specification = {
name = "unicoding",
description = "adapt unicode table",
initializers = {
@@ -186,15 +186,5 @@ registerafmfeature {
-- }
}
-registerotffeature {
- name = "unicoding",
- description = "adapt unicode table",
- initializers = {
- base = initializeunicoding,
- node = initializeunicoding,
- },
- -- manipulators = {
- -- base = finalizeunicoding,
- -- node = finalizeunicoding,
- -- }
-}
+registerotffeature(unicoding_specification)
+registerafmfeature(unicoding_specification)
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-ext.lua b/Master/texmf-dist/tex/context/base/mkiv/font-ext.lua
index 79144aa70e3..189a588f146 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-ext.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-ext.lua
@@ -149,7 +149,7 @@ local function initializeexpansion(tfmdata,value)
end
end
-registerotffeature {
+local expansion_specification = {
name = "expansion",
description = "apply hz optimization",
initializers = {
@@ -158,14 +158,8 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "expansion",
- description = "apply hz optimization",
- initializers = {
- base = initializeexpansion,
- node = initializeexpansion,
- }
-}
+registerotffeature(expansion_specification)
+registerafmfeature(expansion_specification)
fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end)
@@ -466,7 +460,7 @@ local function initializeprotrusion(tfmdata,value)
end
end
-registerotffeature {
+local protrusion_specification = {
name = "protrusion",
description = "l/r margin character protrusion",
initializers = {
@@ -475,14 +469,8 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "protrusion",
- description = "shift characters into the left and or right margin",
- initializers = {
- base = initializeprotrusion,
- node = initializeprotrusion,
- }
-}
+registerotffeature(protrusion_specification)
+registerafmfeature(protrusion_specification)
fonts.goodies.register("protrusions", function(...) return fonts.goodies.report("protrusions", trace_protrusion, ...) end)
@@ -534,7 +522,7 @@ local function initializeitlc(tfmdata,value) -- hm, always value
end
end
-registerotffeature {
+local italic_specification = {
name = "itlc",
description = "italic correction",
initializers = {
@@ -543,20 +531,14 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "itlc",
- description = "italic correction",
- initializers = {
- base = initializeitlc,
- node = initializeitlc,
- }
-}
+registerotffeature(italic_specification)
+registerafmfeature(italic_specification)
local function initializetextitalics(tfmdata,value) -- yes no delay
tfmdata.properties.textitalics = toboolean(value)
end
-registerotffeature {
+local textitalics_specification = {
name = "textitalics",
description = "use alternative text italic correction",
initializers = {
@@ -565,20 +547,14 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "textitalics",
- description = "use alternative text italic correction",
- initializers = {
- base = initializetextitalics,
- node = initializetextitalics,
- }
-}
+registerotffeature(textitalics_specification)
+registerafmfeature(textitalics_specification)
local function initializemathitalics(tfmdata,value) -- yes no delay
tfmdata.properties.mathitalics = toboolean(value)
end
-registerotffeature {
+local mathitalics_specification = {
name = "mathitalics",
description = "use alternative math italic correction",
initializers = {
@@ -587,14 +563,8 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "mathitalics",
- description = "use alternative math italic correction",
- initializers = {
- base = initializemathitalics,
- node = initializemathitalics,
- }
-}
+registerotffeature(mathitalics_specification)
+registerafmfeature(mathitalics_specification)
-- slanting
@@ -610,7 +580,7 @@ local function initializeslant(tfmdata,value)
tfmdata.parameters.slantfactor = value
end
-registerotffeature {
+local slant_specification = {
name = "slant",
description = "slant glyphs",
initializers = {
@@ -619,14 +589,8 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "slant",
- description = "slant glyphs",
- initializers = {
- base = initializeslant,
- node = initializeslant,
- }
-}
+registerotffeature(slant_specification)
+registerafmfeature(slant_specification)
local function initializeextend(tfmdata,value)
value = tonumber(value)
@@ -640,7 +604,7 @@ local function initializeextend(tfmdata,value)
tfmdata.parameters.extendfactor = value
end
-registerotffeature {
+local extend_specification = {
name = "extend",
description = "scale glyphs horizontally",
initializers = {
@@ -649,14 +613,8 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "extend",
- description = "scale glyphs horizontally",
- initializers = {
- base = initializeextend,
- node = initializeextend,
- }
-}
+registerotffeature(extend_specification)
+registerafmfeature(extend_specification)
-- For Wolfgang Schuster:
--
@@ -745,7 +703,7 @@ local function manipulatedimensions(tfmdata,key,value)
end
end
-registerotffeature {
+local dimensions_specification = {
name = "dimensions",
description = "force dimensions",
manipulators = {
@@ -754,6 +712,9 @@ registerotffeature {
}
}
+registerotffeature(dimensions_specification)
+registerafmfeature(dimensions_specification)
+
-- for zhichu chen (see mailing list archive): we might add a few more variants
-- in due time
--
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-fbk.lua b/Master/texmf-dist/tex/context/base/mkiv/font-fbk.lua
index 9ef0706d235..3734e8071bf 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-fbk.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-fbk.lua
@@ -251,7 +251,7 @@ local function composecharacters(tfmdata)
end
end
-registerotffeature {
+local compose_specification = {
name = "compose",
description = "additional composed characters",
manipulators = {
@@ -260,14 +260,8 @@ registerotffeature {
}
}
-registerafmfeature {
- name = "compose",
- description = "additional composed characters",
- manipulators = {
- base = composecharacters,
- node = composecharacters,
- }
-}
+registerotffeature(compose_specification)
+registerafmfeature(compose_specification)
vf.helpers.composecharacters = composecharacters
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-gbn.lua b/Master/texmf-dist/tex/context/base/mkiv/font-gbn.lua
index daa072b4bc2..a02406b756f 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-gbn.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-gbn.lua
@@ -126,17 +126,19 @@ function nodes.handlers.nodepass(head)
local variant = hash[getchar(p)]
if variant then
setchar(p,variant)
- if not redundant then
- redundant = { n }
- else
- redundant[#redundant+1] = n
- end
end
end
end
+ -- per generic user request we always remove selectors
+ if not redundant then
+ redundant = { n }
+ else
+ redundant[#redundant+1] = n
+ end
end
end
end
+ local nofbasefonts = #basefonts
if redundant then
for i=1,#redundant do
local r = redundant[i]
@@ -147,8 +149,8 @@ function nodes.handlers.nodepass(head)
else
setlink(p,n)
end
- if b > 0 then
- for i=1,b do
+ if nofbasefonts > 0 then
+ for i=1,nofbasefonts do
local bi = basefonts[i]
if r == bi[1] then
bi[1] = n
@@ -192,8 +194,8 @@ function nodes.handlers.nodepass(head)
end
end
end
- if basemodepass and #basefonts > 0 then
- for i=1,#basefonts do
+ if basemodepass and nofbasefonts > 0 then
+ for i=1,nofbasefonts do
local range = basefonts[i]
local start = range[1]
local stop = range[2]
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-gds.lua b/Master/texmf-dist/tex/context/base/mkiv/font-gds.lua
index 23bbad42f63..52bb9c98352 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-gds.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-gds.lua
@@ -465,9 +465,9 @@ local function setextensions(tfmdata)
end
end
--- installation (collected to keep the overview) -- also for type 1
+-- installation
-registerotffeature {
+local goodies_specification = {
name = "goodies",
description = "goodies on top of built in features",
initializers = {
@@ -477,6 +477,12 @@ registerotffeature {
}
}
+registerotffeature(goodies_specification)
+registerafmfeature(goodies_specification)
+registertfmfeature(goodies_specification)
+
+-- maybe more of the following could be for type one too
+
registerotffeature {
name = "extrafeatures",
description = "extra features",
@@ -527,30 +533,6 @@ registerotffeature {
}
}
--- afm
-
-registerafmfeature {
- name = "goodies",
- description = "goodies on top of built in features",
- initializers = {
- position = 1,
- base = setgoodies,
- node = setgoodies,
- }
-}
-
--- tfm
-
-registertfmfeature {
- name = "goodies",
- description = "goodies on top of built in features",
- initializers = {
- position = 1,
- base = setgoodies,
- node = setgoodies,
- }
-}
-
-- experiment, we have to load the definitions immediately as they precede
-- the definition so they need to be initialized in the typescript
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi b/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi
index 7bb042a5943..90de409d138 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi
@@ -36,9 +36,6 @@
\registerctxluafile{font-tfm}{1.001}
-\registerctxluafile{font-afm}{1.001}
-\registerctxluafile{font-afk}{1.001}
-
\registerctxluafile{font-hsh}{1.001} % hashes used by context
\registerctxluafile{font-nod}{1.001}
@@ -58,6 +55,12 @@
\registerctxluafile{font-oth}{1.001}
\registerctxluafile{font-osd}{1.001}
+% we use otf code for type one
+
+\registerctxluafile{font-one}{1.001}
+%registerctxluafile{font-afm}{1.001}
+\registerctxluafile{font-afk}{1.001}
+
% so far
\registerctxluafile{font-pat}{1.001} % patchers
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-one.lua b/Master/texmf-dist/tex/context/base/mkiv/font-one.lua
new file mode 100644
index 00000000000..3602fd4650a
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-one.lua
@@ -0,0 +1,1220 @@
+if not modules then modules = { } end modules ['font-one'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>Some code may look a bit obscure but this has to do with the fact that we also use
+this code for testing and much code evolved in the transition from <l n='tfm'/> to
+<l n='afm'/> to <l n='otf'/>.</p>
+
+<p>The following code still has traces of intermediate font support where we handled
+font encodings. Eventually font encoding went away but we kept some code around in
+other modules.</p>
+
+<p>This version implements a node mode approach so that users can also more easily
+add features.</p>
+--ldx]]--
+
+local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
+
+local next, type, tonumber = next, type, tonumber
+local match, gmatch, lower, gsub, strip, find = string.match, string.gmatch, string.lower, string.gsub, string.strip, string.find
+local char, byte, sub = string.char, string.byte, string.sub
+local abs = math.abs
+local bxor, rshift = bit32.bxor, bit32.rshift
+local P, S, R, Cmt, C, Ct, Cs, lpegmatch, patterns = lpeg.P, lpeg.S, lpeg.R, lpeg.Cmt, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.match, lpeg.patterns
+local derivetable = table.derive
+
+local trace_features = false trackers.register("afm.features", function(v) trace_features = v end)
+local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end)
+local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end)
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+
+local report_afm = logs.reporter("fonts","afm loading")
+
+local setmetatableindex = table.setmetatableindex
+
+local findbinfile = resolvers.findbinfile
+
+local definers = fonts.definers
+local readers = fonts.readers
+local constructors = fonts.constructors
+
+local afm = constructors.newhandler("afm")
+local pfb = constructors.newhandler("pfb")
+local otf = fonts.handlers.otf
+
+local otfreaders = otf.readers
+local otfenhancers = otf.enhancers
+
+local afmfeatures = constructors.newfeatures("afm")
+local registerafmfeature = afmfeatures.register
+
+afm.version = 1.505 -- incrementing this number one up will force a re-cache
+afm.cache = containers.define("fonts", "afm", afm.version, true)
+afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*)
+
+afm.helpdata = { } -- set later on so no local for this
+afm.syncspace = true -- when true, nicer stretch values
+
+local overloads = fonts.mappings.overloads
+
+local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
+
+--[[ldx--
+<p>We start with the basic reader which we give a name similar to the
+built in <l n='tfm'/> and <l n='otf'/> reader.</p>
+--ldx]]--
+
+--~ Comment FONTIDENTIFIER LMMATHSYMBOLS10
+--~ Comment CODINGSCHEME TEX MATH SYMBOLS
+--~ Comment DESIGNSIZE 10.0 pt
+--~ Comment CHECKSUM O 4261307036
+--~ Comment SPACE 0 plus 0 minus 0
+--~ Comment QUAD 1000
+--~ Comment EXTRASPACE 0
+--~ Comment NUM 676.508 393.732 443.731
+--~ Comment DENOM 685.951 344.841
+--~ Comment SUP 412.892 362.892 288.889
+--~ Comment SUB 150 247.217
+--~ Comment SUPDROP 386.108
+--~ Comment SUBDROP 50
+--~ Comment DELIM 2390 1010
+--~ Comment AXISHEIGHT 250
+
+local comment = P("Comment")
+local spacing = patterns.spacer -- S(" \t")^1
+local lineend = patterns.newline -- S("\n\r")
+local words = C((1 - lineend)^1)
+local number = C((R("09") + S("."))^1) / tonumber * spacing^0
+local data = lpeg.Carg(1)
+
+local pattern = ( -- needs testing ... not used anyway as we no longer need math afm's
+ comment * spacing *
+ (
+ data * (
+ ("CODINGSCHEME" * spacing * words ) / function(fd,a) end +
+ ("DESIGNSIZE" * spacing * number * words ) / function(fd,a) fd[ 1] = a end +
+ ("CHECKSUM" * spacing * number * words ) / function(fd,a) fd[ 2] = a end +
+ ("SPACE" * spacing * number * "plus" * number * "minus" * number) / function(fd,a,b,c) fd[ 3], fd[ 4], fd[ 5] = a, b, c end +
+ ("QUAD" * spacing * number ) / function(fd,a) fd[ 6] = a end +
+ ("EXTRASPACE" * spacing * number ) / function(fd,a) fd[ 7] = a end +
+ ("NUM" * spacing * number * number * number ) / function(fd,a,b,c) fd[ 8], fd[ 9], fd[10] = a, b, c end +
+ ("DENOM" * spacing * number * number ) / function(fd,a,b ) fd[11], fd[12] = a, b end +
+ ("SUP" * spacing * number * number * number ) / function(fd,a,b,c) fd[13], fd[14], fd[15] = a, b, c end +
+ ("SUB" * spacing * number * number ) / function(fd,a,b) fd[16], fd[17] = a, b end +
+ ("SUPDROP" * spacing * number ) / function(fd,a) fd[18] = a end +
+ ("SUBDROP" * spacing * number ) / function(fd,a) fd[19] = a end +
+ ("DELIM" * spacing * number * number ) / function(fd,a,b) fd[20], fd[21] = a, b end +
+ ("AXISHEIGHT" * spacing * number ) / function(fd,a) fd[22] = a end
+ )
+ + (1-lineend)^0
+ )
+ + (1-comment)^1
+)^0
+
+local function scan_comment(str)
+ local fd = { }
+ lpegmatch(pattern,str,1,fd)
+ return fd
+end
+
+-- On a rainy day I will rewrite this in lpeg ... or we can use the (slower) fontloader
+-- as it now supports afm/pfb loading but it's not too bad to have different methods
+-- for testing approaches.
+
+local keys = { }
+
+function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces
+ data.metadata.fullname = strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.monospaced = toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender = tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end
+function keys.Comment (data,line)
+ -- Comment DesignSize 12 (pts)
+ -- Comment TFM designsize: 12 (in points)
+ line = lower(line)
+ local designsize = match(line,"designsize[^%d]*(%d+)")
+ if designsize then data.metadata.designsize = tonumber(designsize) end
+end
+
+local function get_charmetrics(data,charmetrics,vector)
+ local characters = data.characters
+ local chr, ind = { }, 0
+ for k, v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
+ if k == 'C' then
+ v = tonumber(v)
+ if v < 0 then
+ ind = ind + 1 -- ?
+ else
+ ind = v
+ end
+ chr = {
+ index = ind
+ }
+ elseif k == 'WX' then
+ chr.width = tonumber(v)
+ elseif k == 'N' then
+ characters[v] = chr
+ elseif k == 'B' then
+ local llx, lly, urx, ury = match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
+ chr.boundingbox = { tonumber(llx), tonumber(lly), tonumber(urx), tonumber(ury) }
+ elseif k == 'L' then
+ local plus, becomes = match(v,"^(.-) +(.-)$")
+ local ligatures = chr.ligatures
+ if ligatures then
+ ligatures[plus] = becomes
+ else
+ chr.ligatures = { [plus] = becomes }
+ end
+ end
+ end
+end
+
+local function get_kernpairs(data,kernpairs)
+ local characters = data.characters
+ for one, two, value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
+ local chr = characters[one]
+ if chr then
+ local kerns = chr.kerns
+ if kerns then
+ kerns[two] = tonumber(value)
+ else
+ chr.kerns = { [two] = tonumber(value) }
+ end
+ end
+ end
+end
+
+local function get_variables(data,fontmetrics)
+ for key, rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
+ local keyhandler = keys[key]
+ if keyhandler then
+ keyhandler(data,rest)
+ end
+ end
+end
+
+-- new (unfinished) pfb loader but i see no differences between
+-- old and new (one bad vector with old)
+
+local get_indexes
+
+do
+
+ local n, m
+
+ local progress = function(str,position,name,size)
+ local forward = position + tonumber(size) + 3 + 2
+ n = n + 1
+ if n >= m then
+ return #str, name
+ elseif forward < #str then
+ return forward, name
+ else
+ return #str, name
+ end
+ end
+
+ local initialize = function(str,position,size)
+ n = 0
+ m = tonumber(size)
+ return position + 1
+ end
+
+ local charstrings = P("/CharStrings")
+ local name = P("/") * C((R("az")+R("AZ")+R("09")+S("-_."))^1)
+ local size = C(R("09")^1)
+ local spaces = P(" ")^1
+
+ local p_filternames = Ct (
+ (1-charstrings)^0 * charstrings * spaces * Cmt(size,initialize)
+ * (Cmt(name * P(" ")^1 * C(R("09")^1), progress) + P(1))^1
+ )
+
+ -- if one of first 4 not 0-9A-F then binary else hex
+
+ local decrypt
+
+ do
+
+ local r, c1, c2, n = 0, 0, 0, 0
+
+ local function step(c)
+ local cipher = byte(c)
+ local plain = bxor(cipher,rshift(r,8))
+ r = ((cipher + r) * c1 + c2) % 65536
+ return char(plain)
+ end
+
+ decrypt = function(binary)
+ r, c1, c2, n = 55665, 52845, 22719, 4
+ binary = gsub(binary,".",step)
+ return sub(binary,n+1)
+ end
+
+ -- local pattern = Cs((P(1) / step)^1)
+ --
+ -- decrypt = function(binary)
+ -- r, c1, c2, n = 55665, 52845, 22719, 4
+ -- binary = lpegmatch(pattern,binary)
+ -- return sub(binary,n+1)
+ -- end
+
+ end
+
+ local function loadpfbvector(filename)
+ -- for the moment limited to encoding only
+
+ local data = io.loaddata(resolvers.findfile(filename))
+
+    if not data then
+        print("no data",filename)
+        return
+    end
+
+    if not find(data,"!PS%-AdobeFont%-") then
+        print("no font",filename)
+        return
+    end
+
+ local ascii, binary = match(data,"(.*)eexec%s+......(.*)")
+
+ if not binary then
+ print("no binary",filename)
+ return
+ end
+
+ binary = decrypt(binary,4)
+
+ local vector = lpegmatch(p_filternames,binary)
+
+    if not vector then
+        print("no vector",filename)
+        return
+    end
+
+    vector[0] = table.remove(vector,1)
+
+ return vector
+
+ end
+
+ get_indexes = function(data,pfbname)
+ local vector = loadpfbvector(pfbname)
+ if vector then
+ local characters = data.characters
+ if trace_loading then
+ report_afm("getting index data from %a",pfbname)
+ end
+ for index=1,#vector do
+ local name = vector[index]
+ local char = characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %a has index %a",name,index)
+ end
+ char.index = index
+ end
+ end
+ end
+ end
+
+end
+
+local function readafm(filename)
+ local ok, afmblob, size = resolvers.loadbinfile(filename) -- has logging
+ if ok and afmblob then
+ local data = {
+ resources = {
+ filename = resolvers.unresolve(filename),
+ version = afm.version,
+ creator = "context mkiv",
+ },
+ properties = {
+ hasitalics = false,
+ },
+ goodies = {
+ },
+ metadata = {
+ filename = file.removesuffix(file.basename(filename))
+ },
+ characters = {
+ -- a temporary store
+ },
+ descriptions = {
+ -- the final store
+ },
+ }
+ afmblob = gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics", function(charmetrics)
+ if trace_loading then
+ report_afm("loading char metrics")
+ end
+ get_charmetrics(data,charmetrics,vector)
+ return ""
+ end)
+ afmblob = gsub(afmblob,"StartKernPairs(.-)EndKernPairs", function(kernpairs)
+ if trace_loading then
+ report_afm("loading kern pairs")
+ end
+ get_kernpairs(data,kernpairs)
+ return ""
+ end)
+ afmblob = gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics", function(version,fontmetrics)
+ if trace_loading then
+ report_afm("loading variables")
+ end
+ data.afmversion = version
+ get_variables(data,fontmetrics)
+ data.fontdimens = scan_comment(fontmetrics) -- todo: all lpeg, no time now
+ return ""
+ end)
+ return data
+ else
+ if trace_loading then
+ report_afm("no valid afm file %a",filename)
+ end
+ return nil
+ end
+end
+
+--[[ldx--
+<p>We cache files. Caching is taken care of in the loader. We cheat a bit by adding
+ligatures and kern information to the afm derived data. That way we can set them faster
+when defining a font.</p>
+
+<p>We still keep the loading two phased: first we load the data in a traditional
+fashion and later we transform it to sequences.</p>
+--ldx]]--
+
+local addkerns, unify, normalize, fixnames, addligatures, addtexligatures
+
+function afm.load(filename)
+ filename = resolvers.findfile(filename,'afm') or ""
+ if filename ~= "" and not fonts.names.ignoredfile(filename) then
+ local name = file.removesuffix(file.basename(filename))
+ local data = containers.read(afm.cache,name)
+ local attr = lfs.attributes(filename)
+ local size, time = attr.size or 0, attr.modification or 0
+ --
+ local pfbfile = file.replacesuffix(name,"pfb")
+ local pfbname = resolvers.findfile(pfbfile,"pfb") or ""
+ if pfbname == "" then
+ pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize, pfbtime = 0, 0
+ if pfbname ~= "" then
+ local attr = lfs.attributes(pfbname)
+ pfbsize = attr.size or 0
+ pfbtime = attr.modification or 0
+ end
+ if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then
+ report_afm("reading %a",filename)
+ data = readafm(filename)
+ if data then
+ if pfbname ~= "" then
+ data.resources.filename = resolvers.unresolve(pfbname)
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %a",filename)
+ -- data.resources.filename = "unset" -- better than loading the afm file
+ end
+ -- we now have all the data loaded
+ if trace_loading then
+ report_afm("unifying %a",filename)
+ end
+ unify(data,filename)
+ if trace_loading then
+ report_afm("add ligatures") -- there can be missing ones
+ end
+ addligatures(data)
+ if trace_loading then
+ report_afm("add extra kerns")
+ end
+ addkerns(data)
+ if trace_loading then
+ report_afm("normalizing")
+ end
+ normalize(data)
+ if trace_loading then
+ report_afm("fixing names")
+ end
+ fixnames(data)
+ if trace_loading then
+ report_afm("add tounicode data")
+ end
+ -- otfreaders.addunicodetable(data) -- only when not done yet
+ fonts.mappings.addtounicode(data,filename)
+ -- otfreaders.extend(data)
+ otfreaders.pack(data)
+ data.size = size
+ data.time = time
+ data.pfbsize = pfbsize
+ data.pfbtime = pfbtime
+ report_afm("saving %a in cache",name)
+ -- data.resources.unicodes = nil -- consistent with otf but here we save not much
+ data = containers.write(afm.cache, name, data)
+ data = containers.read(afm.cache,name)
+ end
+ end
+ if data then
+ -- constructors.addcoreunicodes(unicodes)
+ otfreaders.unpack(data)
+ otfreaders.expand(data) -- inline tables
+ otfreaders.addunicodetable(data) -- only when not done yet
+ otfenhancers.apply(data,filename,data)
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
+ end
+ return data
+ else
+ return nil
+ end
+end
+
+local uparser = fonts.mappings.makenameparser()
+
+unify = function(data, filename)
+ local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
+ local unicodes = { }
+ local names = { }
+ local private = constructors.privateoffset
+ local descriptions = data.descriptions
+ for name, blob in next, data.characters do
+ local code = unicodevector[name] -- or characters.name_to_unicode[name]
+ if not code then
+ code = lpegmatch(uparser,name)
+ if not code then
+ code = private
+ private = private + 1
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
+ end
+ end
+ local index = blob.index
+ unicodes[name] = code
+ names[name] = index
+ blob.name = name
+ descriptions[code] = {
+ boundingbox = blob.boundingbox,
+ width = blob.width,
+ kerns = blob.kerns,
+ index = index,
+ name = name,
+ }
+ end
+ for unicode, description in next, descriptions do
+ local kerns = description.kerns
+ if kerns then
+ local krn = { }
+ for name, kern in next, kerns do
+ local unicode = unicodes[name]
+ if unicode then
+ krn[unicode] = kern
+ else
+ -- print(unicode,name)
+ end
+ end
+ description.kerns = krn
+ end
+ end
+ data.characters = nil
+ local resources = data.resources
+ local filename = resources.filename or file.removesuffix(file.basename(filename))
+ resources.filename = resolvers.unresolve(filename) -- no shortcut
+ resources.unicodes = unicodes -- name to unicode
+ resources.marks = { } -- todo
+ -- resources.names = names -- name to index
+ resources.private = private
+end
+
+local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
+local noflags = { false, false, false, false }
+
+normalize = function(data)
+ local ligatures = setmetatableindex("table")
+ local kerns = setmetatableindex("table")
+ local extrakerns = setmetatableindex("table")
+ for u, c in next, data.descriptions do
+ local l = c.ligatures
+ local k = c.kerns
+ local e = c.extrakerns
+ if l then
+ ligatures[u] = l
+ for u, v in next, l do
+ l[u] = { ligature = v }
+ end
+ c.ligatures = nil
+ end
+ if k then
+ kerns[u] = k
+ for u, v in next, k do
+ k[u] = v -- { v, 0 }
+ end
+ c.kerns = nil
+ end
+ if e then
+ extrakerns[u] = e
+ for u, v in next, e do
+ e[u] = v -- { v, 0 }
+ end
+ c.extrakerns = nil
+ end
+ end
+ local features = {
+ gpos = { },
+ gsub = { },
+ }
+ local sequences = {
+ -- only filled ones
+ }
+ if next(ligatures) then
+ features.gsub.liga = everywhere
+ data.properties.hasligatures = true
+ sequences[#sequences+1] = {
+ features = {
+ liga = everywhere,
+ },
+ flags = noflags,
+ name = "s_s_0",
+ nofsteps = 1,
+ order = { "liga" },
+ type = "gsub_ligature",
+ steps = {
+ {
+ coverage = ligatures,
+ },
+ },
+ }
+ end
+ if next(kerns) then
+ features.gpos.kern = everywhere
+ data.properties.haskerns = true
+ sequences[#sequences+1] = {
+ features = {
+ kern = everywhere,
+ },
+ flags = noflags,
+ name = "p_s_0",
+ nofsteps = 1,
+ order = { "kern" },
+ type = "gpos_pair",
+ steps = {
+ {
+ format = "kern",
+ coverage = kerns,
+ },
+ },
+ }
+ end
+ if next(extrakerns) then
+ features.gpos.extrakerns = everywhere
+ data.properties.haskerns = true
+ sequences[#sequences+1] = {
+ features = {
+ extrakerns = everywhere,
+ },
+ flags = noflags,
+ name = "p_s_1",
+ nofsteps = 1,
+ order = { "extrakerns" },
+ type = "gpos_pair",
+ steps = {
+ {
+ format = "kern",
+ coverage = extrakerns,
+ },
+ },
+ }
+ end
+ -- todo: compress kerns
+ data.resources.features = features
+ data.resources.sequences = sequences
+end
+
+fixnames = function(data)
+ for k, v in next, data.descriptions do
+ local n = v.name
+ local r = overloads[n]
+ if r then
+ local name = r.name
+ if trace_indexing then
+ report_afm("renaming characters %a to %a",n,name)
+ end
+ v.name = name
+ v.unicode = r.unicode
+ end
+ end
+end
+
+--[[ldx--
+<p>These helpers extend the basic table with extra ligatures, texligatures
+and extra kerns. This saves quite some lookups later.</p>
+--ldx]]--
+
+local addthem = function(rawdata,ligatures)
+ if ligatures then
+ local descriptions = rawdata.descriptions
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
+ -- local names = resources.names
+ for ligname, ligdata in next, ligatures do
+ local one = descriptions[unicodes[ligname]]
+ if one then
+ for _, pair in next, ligdata do
+ local two, three = unicodes[pair[1]], unicodes[pair[2]]
+ if two and three then
+ local ol = one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two] = three
+ end
+ else
+ one.ligatures = { [two] = three }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+addligatures = function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures = function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
+
+--[[ldx--
+<p>We keep the extra kerns in separate kerning tables so that we can use
+them selectively.</p>
+--ldx]]--
+
+-- This is rather old code (from the beginning when we had only tfm). If
+-- we unify the afm data (now we have names all over the place) then
+-- we can use shcodes but there will be much more looping then. But we
+-- could get rid of the tables in char-cmp then. Also, in the generic version
+-- we don't use the character database. (Ok, we can have a context specific
+-- variant).
+
+addkerns = function(rawdata) -- using shcodes is not robust here
+ local descriptions = rawdata.descriptions
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
+ local function do_it_left(what)
+ if what then
+ for unicode, description in next, descriptions do
+ local kerns = description.kerns
+ if kerns then
+ local extrakerns
+ for complex, simple in next, what do
+ complex = unicodes[complex]
+ simple = unicodes[simple]
+ if complex and simple then
+ local ks = kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex] = ks
+ else
+ extrakerns = { [complex] = ks }
+ end
+ end
+ end
+ end
+ if extrakerns then
+ description.extrakerns = extrakerns
+ end
+ end
+ end
+ end
+ end
+ local function do_it_copy(what)
+ if what then
+ for complex, simple in next, what do
+ complex = unicodes[complex]
+ simple = unicodes[simple]
+ if complex and simple then
+ local complexdescription = descriptions[complex]
+ if complexdescription then -- optional
+                        local simpledescription = descriptions[simple]
+ if simpledescription then
+ local extrakerns
+ local kerns = simpledescription.kerns
+ if kerns then
+ for unicode, kern in next, kerns do
+ if extrakerns then
+ extrakerns[unicode] = kern
+ else
+ extrakerns = { [unicode] = kern }
+ end
+ end
+ end
+ local extrakerns = simpledescription.extrakerns
+ if extrakerns then
+ for unicode, kern in next, extrakerns do
+ if extrakerns then
+ extrakerns[unicode] = kern
+ else
+ extrakerns = { [unicode] = kern }
+ end
+ end
+ end
+ if extrakerns then
+ complexdescription.extrakerns = extrakerns
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ -- add complex with values of simplified when present
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
+ -- copy kerns from simple char to complex char unless set
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
+end
+
+--[[ldx--
+<p>The copying routine looks messy (and is indeed a bit messy).</p>
+--ldx]]--
+
+local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name
+ if data then
+ for unicode, description in next, data.descriptions do
+ local bb = description.boundingbox
+ if bb then
+ local ht, dp = bb[4], -bb[2]
+ if ht == 0 or ht < 0 then
+ -- no need to set it and no negative heights, nil == 0
+ else
+ description.height = ht
+ end
+ if dp == 0 or dp < 0 then
+                    -- no need to set it and no negative depths, nil == 0
+ else
+ description.depth = dp
+ end
+ end
+ end
+ end
+end
+
+local function copytotfm(data)
+ if data and data.descriptions then
+ local metadata = data.metadata
+ local resources = data.resources
+ local properties = derivetable(data.properties)
+ local descriptions = derivetable(data.descriptions)
+ local goodies = derivetable(data.goodies)
+ local characters = { }
+ local parameters = { }
+ local unicodes = resources.unicodes
+ --
+ for unicode, description in next, data.descriptions do -- use parent table
+ characters[unicode] = { }
+ end
+ --
+ local filename = constructors.checkedfilename(resources)
+ local fontname = metadata.fontname or metadata.fullname
+ local fullname = metadata.fullname or metadata.fontname
+ local endash = 0x0020 -- space
+ local emdash = 0x2014
+ local spacer = "space"
+ local spaceunits = 500
+ --
+ local monospaced = metadata.monospaced
+ local charwidth = metadata.charwidth
+ local italicangle = metadata.italicangle
+ local charxheight = metadata.xheight and metadata.xheight > 0 and metadata.xheight
+ properties.monospaced = monospaced
+ parameters.italicangle = italicangle
+ parameters.charwidth = charwidth
+ parameters.charxheight = charxheight
+ -- same as otf
+ if properties.monospaced then
+ if descriptions[endash] then
+ spaceunits, spacer = descriptions[endash].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width, "emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ else
+ if descriptions[endash] then
+ spaceunits, spacer = descriptions[endash].width, "space"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ end
+ spaceunits = tonumber(spaceunits)
+ if spaceunits < 200 then
+ -- todo: warning
+ end
+ --
+ parameters.slant = 0
+ parameters.space = spaceunits
+ parameters.space_stretch = 500
+ parameters.space_shrink = 333
+ parameters.x_height = 400
+ parameters.quad = 1000
+ --
+ if italicangle and italicangle ~= 0 then
+ parameters.italicangle = italicangle
+ parameters.italicfactor = math.cos(math.rad(90+italicangle))
+ parameters.slant = - math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch = 0
+ parameters.space_shrink = 0
+ elseif afm.syncspace then
+ parameters.space_stretch = spaceunits/2
+ parameters.space_shrink = spaceunits/3
+ end
+ parameters.extra_space = parameters.space_shrink
+ if charxheight then
+ parameters.x_height = charxheight
+ else
+ -- same as otf
+ local x = 0x0078 -- x
+ if x then
+ local x = descriptions[x]
+ if x then
+ parameters.x_height = x.height
+ end
+ end
+ --
+ end
+ local fd = data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then -- math
+ for k,v in next, fd do
+ parameters[k] = v
+ end
+ end
+ --
+ parameters.designsize = (metadata.designsize or 10)*65536
+ parameters.ascender = abs(metadata.ascender or 0)
+ parameters.descender = abs(metadata.descender or 0)
+ parameters.units = 1000
+ --
+ properties.spacer = spacer
+ properties.encodingbytes = 2
+ properties.format = fonts.formats[filename] or "type1"
+ properties.filename = filename
+ properties.fontname = fontname
+ properties.fullname = fullname
+ properties.psname = fullname
+ properties.name = filename or fullname or fontname
+ --
+ if next(characters) then
+ return {
+ characters = characters,
+ descriptions = descriptions,
+ parameters = parameters,
+ resources = resources,
+ properties = properties,
+ goodies = goodies,
+ }
+ end
+ end
+ return nil
+end
+
+--[[ldx--
+<p>Originally we had features kind of hard coded for <l n='afm'/>
+files but since I expect to support more font formats, I decided
+to treat this fontformat like any other and handle features in a
+more configurable way.</p>
+--ldx]]--
+
+function afm.setfeatures(tfmdata,features)
+ local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
+ if okay then
+ return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
+ else
+ return { } -- will become false
+ end
+end
+
+local function addtables(data)
+ local resources = data.resources
+ local lookuptags = resources.lookuptags
+ local unicodes = resources.unicodes
+ if not lookuptags then
+ lookuptags = { }
+ resources.lookuptags = lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v = type(k) == "number" and ("lookup " .. k) or k
+ t[k] = v
+ return v
+ end)
+ if not unicodes then
+ unicodes = { }
+ resources.unicodes = unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u, d in next, data.descriptions do
+ local n = d.name
+ if n then
+ t[n] = u
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes) -- do we really need this?
+end
+
+local function afmtotfm(specification)
+ local afmname = specification.filename or specification.name
+ if specification.forced == "afm" or specification.format == "afm" then -- move this one up
+ if trace_loading then
+ report_afm("forcing afm format for %a",afmname)
+ end
+ else
+ local tfmname = findbinfile(afmname,"ofm") or ""
+ if tfmname ~= "" then
+ if trace_loading then
+ report_afm("fallback from afm to tfm for %a",afmname)
+ end
+ return -- just that
+ end
+ end
+ if afmname ~= "" then
+ -- weird, isn't this already done then?
+ local features = constructors.checkedfeatures("afm",specification.features.normal)
+ specification.features.normal = features
+ constructors.hashinstance(specification,true) -- also weird here
+ --
+ specification = definers.resolve(specification) -- new, was forgotten
+ local cache_id = specification.hash
+ local tfmdata = containers.read(constructors.cache, cache_id) -- cache with features applied
+ if not tfmdata then
+ local rawdata = afm.load(afmname)
+ if rawdata and next(rawdata) then
+ addtables(rawdata)
+ adddimensions(rawdata)
+ tfmdata = copytotfm(rawdata)
+ if tfmdata and next(tfmdata) then
+ local shared = tfmdata.shared
+ if not shared then
+ shared = { }
+ tfmdata.shared = shared
+ end
+ shared.rawdata = rawdata
+ shared.dynamics = { }
+ tfmdata.changed = { }
+ shared.features = features
+ shared.processes = afm.setfeatures(tfmdata,features)
+ end
+ elseif trace_loading then
+ report_afm("no (valid) afm file found with name %a",afmname)
+ end
+ tfmdata = containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+ end
+end
+
+--[[ldx--
+<p>As soon as we could intercept the <l n='tfm'/> reader, I implemented an
+<l n='afm'/> reader. Since traditional <l n='pdftex'/> could use <l n='opentype'/>
+fonts with <l n='afm'/> companions, the following method also could handle
+those cases, but now that we can handle <l n='opentype'/> directly we no longer
+need this feature.</p>
+--ldx]]--
+
+local function read_from_afm(specification)
+ local tfmdata = afmtotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name = specification.name
+ tfmdata = constructors.scale(tfmdata, specification)
+ local allfeatures = tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+ fonts.loggers.register(tfmdata,'afm',specification)
+ end
+ return tfmdata
+end
+
+--[[ldx--
+<p>Here comes the implementation of a few features. We only implement
+those that make sense for this format.</p>
+--ldx]]--
+
+local function prepareligatures(tfmdata,ligatures,value)
+ if value then
+ local descriptions = tfmdata.descriptions
+ local hasligatures = false
+ for unicode, character in next, tfmdata.characters do
+ local description = descriptions[unicode]
+ local dligatures = description.ligatures
+ if dligatures then
+ local cligatures = character.ligatures
+ if not cligatures then
+ cligatures = { }
+ character.ligatures = cligatures
+ end
+ for unicode, ligature in next, dligatures do
+ cligatures[unicode] = {
+ char = ligature,
+ type = 0
+ }
+ end
+ hasligatures = true
+ end
+ end
+ tfmdata.properties.hasligatures = hasligatures
+ end
+end
+
+local function preparekerns(tfmdata,kerns,value)
+ if value then
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
+ local descriptions = tfmdata.descriptions
+ local haskerns = false
+ for u, chr in next, tfmdata.characters do
+ local d = descriptions[u]
+ local newkerns = d[kerns]
+ if newkerns then
+ local kerns = chr.kerns
+ if not kerns then
+ kerns = { }
+ chr.kerns = kerns
+ end
+ for k,v in next, newkerns do
+ local uk = unicodes[k]
+ if uk then
+ kerns[uk] = v
+ end
+ end
+ haskerns = true
+ end
+ end
+ tfmdata.properties.haskerns = haskerns
+ end
+end
+
+local list = {
+ -- [0x0022] = 0x201D,
+ [0x0027] = 0x2019,
+ -- [0x0060] = 0x2018,
+}
+
+local function texreplacements(tfmdata,value)
+ local descriptions = tfmdata.descriptions
+ local characters = tfmdata.characters
+ for k, v in next, list do
+ characters [k] = characters [v] -- we forget about kerns
+ descriptions[k] = descriptions[v] -- we forget about kerns
+ end
+end
+
+-- local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures', value) end
+-- local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
+-- local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns', value) end
+local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns', value) end
+
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode = lower(value)
+ end
+end
+
+registerafmfeature {
+ name = "mode",
+ description = "mode",
+ initializers = {
+ base = setmode,
+ node = setmode,
+ }
+}
+
+registerafmfeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ node = otf.nodemodeinitializer,
+ base = otf.basemodeinitializer,
+ },
+ processors = {
+ node = otf.featuresprocessor,
+ }
+}
+
+-- readers
+
+local check_tfm = readers.check_tfm
+
+fonts.formats.afm = "type1"
+fonts.formats.pfb = "type1"
+
+local function check_afm(specification,fullname)
+ local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"afm") or ""
+ end
+ if foundname == "" and afm.autoprefixed then
+ local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
+ if encoding and shortname and fonts.encodings.known[encoding] then
+ shortname = findbinfile(shortname,'afm') or "" -- just to be sure
+ if shortname ~= "" then
+ foundname = shortname
+ if trace_defining then
+                report_afm("stripping encoding prefix from filename %a",fullname)
+ end
+ end
+ end
+ end
+ if foundname ~= "" then
+ specification.filename = foundname
+ specification.format = "afm"
+ return read_from_afm(specification)
+ end
+end
+
+function readers.afm(specification,method)
+ local fullname, tfmdata = specification.filename or "", nil
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ tfmdata = check_afm(specification,specification.name .. "." .. forced)
+ end
+ if not tfmdata then
+ method = method or definers.method or "afm or tfm"
+ if method == "tfm" then
+ tfmdata = check_tfm(specification,specification.name)
+ elseif method == "afm" then
+ tfmdata = check_afm(specification,specification.name)
+ elseif method == "tfm or afm" then
+ tfmdata = check_tfm(specification,specification.name) or check_afm(specification,specification.name)
+ else -- method == "afm or tfm" or method == "" then
+ tfmdata = check_afm(specification,specification.name) or check_tfm(specification,specification.name)
+ end
+ end
+ else
+ tfmdata = check_afm(specification,fullname)
+ end
+ return tfmdata
+end
+
+function readers.pfb(specification,method) -- only called when forced
+ local original = specification.specification
+ if trace_defining then
+ report_afm("using afm reader for %a",original)
+ end
+ specification.specification = gsub(original,"%.pfb",".afm")
+ specification.forced = "afm"
+ return readers.afm(specification,method)
+end
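
Note that check_afm above falls back to stripping a ConTeXt-style encoding prefix (encoding-name) from the requested file when afm.autoprefixed is set. A minimal standalone sketch of that split, with the made-up name "ec-lmr10" standing in for a real encoding/font pair, mirrors the pattern used there:

    local match = string.match
    -- "ec" would be a known encoding, "lmr10" the bare name that is then
    -- looked up as an afm file (hypothetical example input)
    local encoding, shortname = match("ec-lmr10","^(.-)%-(.*)$")
    print(encoding, shortname) -- ec  lmr10
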
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-otc.lua b/Master/texmf-dist/tex/context/base/mkiv/font-otc.lua
index 871b6f13c97..be9ba86604b 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-otc.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-otc.lua
@@ -61,8 +61,6 @@ local function addfeature(data,feature,specifications)
end
-- feature has to be unique but the name entry wins eventually
- -- todo alse gpos
-
local fontfeatures = resources.features or everywhere
local unicodes = resources.unicodes
local splitter = lpeg.splitter(" ",unicodes)
@@ -529,6 +527,7 @@ end
otf.enhancers.addfeature = addfeature
local extrafeatures = { }
+local knownfeatures = { }
function otf.addfeature(name,specification)
if type(name) == "table" then
@@ -536,16 +535,31 @@ function otf.addfeature(name,specification)
name = specification.name
end
if type(name) == "string" then
- extrafeatures[name] = specification
+ local slot = knownfeatures[name]
+ if slot then
+ -- we overload one
+ else
+ slot = #extrafeatures + 1
+ knownfeatures[name] = slot
+ end
+ specification.name = name -- to be sure
+ extrafeatures[slot] = specification
end
end
+-- for feature, specification in next, extrafeatures do
+-- addfeature(data,feature,specification)
+-- end
+
local function enhance(data,filename,raw)
- for feature, specification in next, extrafeatures do
- addfeature(data,feature,specification)
+ for slot=1,#extrafeatures do
+ local specification = extrafeatures[slot]
+ addfeature(data,specification.name,specification)
end
end
+-- otf.enhancers.enhance = enhance
+
otf.enhancers.register("check extra features",enhance)
-- tlig --
@@ -574,6 +588,7 @@ local tlig_specification = {
otf.addfeature("tlig",tlig_specification)
registerotffeature {
+ -- this makes it a known feature (in tables)
name = 'tlig',
description = 'tex ligatures',
}
@@ -598,6 +613,7 @@ local trep_specification = {
otf.addfeature("trep",trep_specification)
registerotffeature {
+ -- this makes it a known feature (in tables)
name = 'trep',
description = 'tex replacements',
}
@@ -699,6 +715,7 @@ local anum_specification = {
otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature
registerotffeature {
+ -- this makes it a known feature (in tables)
name = 'anum',
description = 'arabic digits',
}
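
The slot-based registry above means otf.addfeature keeps features in registration order and overloads by name. A hedged sketch of adding an extra feature of our own, patterned after the tlig/trep calls in this file (the name "tquo", its description and its data are made up, and the field names are assumed from the tlig/trep specifications):

    otf.addfeature("tquo", {
        type     = "substitution",
        features = { ["*"] = { ["*"] = true } }, -- everywhere
        data     = {
            [0x0027] = 0x2019, -- ' becomes a right single quotation mark
        },
    })

    registerotffeature {
        -- this makes it a known feature (in tables)
        name        = "tquo",
        description = "quote replacement (example)",
    }
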
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-otj.lua b/Master/texmf-dist/tex/context/base/mkiv/font-otj.lua
index 6ff80d88df8..b65a9db6627 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-otj.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-otj.lua
@@ -1255,11 +1255,11 @@ local function inject_everything(head,where)
insert_node_after(pre,n,newkern(rightkern))
done = true
end
- end
- if hasmarks then
- local pm = i.markbasenode
- if pm then
- processmark(pm,current,i)
+ if hasmarks then
+ local pm = i.markbasenode
+ if pm then
+ processmark(pm,current,i)
+ end
end
end
end
@@ -1287,11 +1287,11 @@ local function inject_everything(head,where)
insert_node_after(post,n,newkern(rightkern))
done = true
end
- end
- if hasmarks then
- local pm = i.markbasenode
- if pm then
- processmark(pm,current,i)
+ if hasmarks then
+ local pm = i.markbasenode
+ if pm then
+ processmark(pm,current,i)
+ end
end
end
end
@@ -1319,11 +1319,11 @@ local function inject_everything(head,where)
insert_node_after(replace,n,newkern(rightkern))
done = true
end
- end
- if hasmarks then
- local pm = i.markbasenode
- if pm then
- processmark(pm,current,i)
+ if hasmarks then
+ local pm = i.markbasenode
+ if pm then
+ processmark(pm,current,i)
+ end
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-otl.lua b/Master/texmf-dist/tex/context/base/mkiv/font-otl.lua
index f7b6eb5ae5f..01342a9be6e 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-otl.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-otl.lua
@@ -101,6 +101,12 @@ registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef =
-- end
-- end
+-- Enhancers are used to apply fixes and extensions to fonts. For instance, we use them
+-- to implement tlig and trep features. They are not necessarily bound to opentype
+-- fonts but can also apply to type one fonts, given that they obey the structure of an
+-- opentype font. They are not to be confused with format-specific features, but maybe
+-- some are so generic that they might eventually move to this mechanism.
+
local ordered_enhancers = {
"check extra features",
}
@@ -302,7 +308,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
--
enhancers.apply(data,filename,data)
--
- constructors.addcoreunicodes(unicodes)
+ -- constructors.addcoreunicodes(data.resources.unicodes) -- still needed ?
--
if applyruntimefixes then
applyruntimefixes(filename,data)
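
The comment about enhancers comes down to a small interface: an enhancer is a function that receives the loaded font data and may patch it in place, and it is made available with otf.enhancers.register. A hedged sketch (the name and the patch are hypothetical, and a registered enhancer only actually runs when it is also listed in ordered_enhancers, which here contains only "check extra features"):

    local function fixmissingwidths(data,filename,raw)
        -- make sure every glyph description carries a width (hypothetical fix)
        for unicode, description in next, data.descriptions do
            if not description.width then
                description.width = 0
            end
        end
    end

    otf.enhancers.register("fix missing widths",fixmissingwidths)
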
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-oto.lua b/Master/texmf-dist/tex/context/base/mkiv/font-oto.lua
index b7ee717c949..23beba7876d 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-oto.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-oto.lua
@@ -450,3 +450,5 @@ registerotffeature {
base = featuresinitializer,
}
}
+
+otf.basemodeinitializer = featuresinitializer
diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-ots.lua b/Master/texmf-dist/tex/context/base/mkiv/font-ots.lua
index 21225c2274f..c173de2be5a 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/font-ots.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/font-ots.lua
@@ -3652,6 +3652,9 @@ registerotffeature {
}
}
+otf.nodemodeinitializer = featuresinitializer
+otf.featuresprocessor = featuresprocessor
+
-- This can be used for extra handlers, but should be used with care!
otf.handlers = handlers -- used in devanagari
diff --git a/Master/texmf-dist/tex/context/base/mkiv/lang-def.mkiv b/Master/texmf-dist/tex/context/base/mkiv/lang-def.mkiv
index e4906096e0e..5e40a33b030 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/lang-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/lang-def.mkiv
@@ -341,7 +341,7 @@
\c!rightquotation=\upperrightdoubleninequote,
\c!date={\v!year,.,\space,\v!month,\space,\v!day,.}]
-\installlanguage [\s!finish] [\s!fi]
+\installlanguage [\s!finnish] [\s!fi]
\installlanguage [\s!hungarian] [\s!hu]
% Altaic Languages: Uigur, Uzbek, Azeri/Azerbaijani, Chuvash,
diff --git a/Master/texmf-dist/tex/context/base/mkiv/lang-ini.lua b/Master/texmf-dist/tex/context/base/mkiv/lang-ini.lua
index eb7e32b89f3..00fdb3f0973 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/lang-ini.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/lang-ini.lua
@@ -40,8 +40,8 @@ local prehyphenchar = lang.prehyphenchar -- global per language
local posthyphenchar = lang.posthyphenchar -- global per language
local preexhyphenchar = lang.preexhyphenchar -- global per language
local postexhyphenchar = lang.postexhyphenchar -- global per language
-local lefthyphenmin = lang.lefthyphenmin
-local righthyphenmin = lang.righthyphenmin
+----- lefthyphenmin = lang.lefthyphenmin
+----- righthyphenmin = lang.righthyphenmin
local sethjcode = lang.sethjcode
local uccodes = characters.uccodes
@@ -344,8 +344,8 @@ function languages.prehyphenchar (what) return prehyphenchar (tolang(what))
function languages.posthyphenchar (what) return posthyphenchar (tolang(what)) end
function languages.preexhyphenchar (what) return preexhyphenchar (tolang(what)) end
function languages.postexhyphenchar(what) return postexhyphenchar(tolang(what)) end
-function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end
-function languages.righthyphenmin (what) return righthyphenmin (tolang(what)) end
+-------- languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end
+-------- languages.righthyphenmin (what) return righthyphenmin (tolang(what)) end
-- e['implementer']= 'imple{m}{-}{-}menter'
-- e['manual'] = 'man{}{}{}'
diff --git a/Master/texmf-dist/tex/context/base/mkiv/lang-ini.mkiv b/Master/texmf-dist/tex/context/base/mkiv/lang-ini.mkiv
index 8ff41b28792..214ce8ca38a 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/lang-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/lang-ini.mkiv
@@ -518,7 +518,7 @@
\let\dohyphens\relax
-\unexpanded\def\lang_basics_synchronize_min_max
+\unexpanded\def\lang_basics_synchronize_min_max % maybe store this at the lua end
{% these values are stored along with glyph nodes
\lefthyphenmin \numexpr0\languageparameter\s!lefthyphenmin +\hyphenminoffset\relax
\righthyphenmin\numexpr0\languageparameter\s!righthyphenmin+\hyphenminoffset\relax
diff --git a/Master/texmf-dist/tex/context/base/mkiv/lpdf-ini.lua b/Master/texmf-dist/tex/context/base/mkiv/lpdf-ini.lua
index 2149f4729a4..f0b919d4ee0 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/lpdf-ini.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/lpdf-ini.lua
@@ -1059,13 +1059,30 @@ do
return timestamp
end
+ function lpdf.settime(n)
+ if n then
+ n = converters.totime(n)
+ if n then
+ converters.settime(n)
+ timestamp = os.date("%Y-%m-%dT%X",os.time(n)) .. os.timezone(true)
+ end
+ end
+ return timestamp
+ end
+
+ lpdf.settime(tonumber(resolvers.variable("start_time")) or tonumber(resolvers.variable("SOURCE_DATE_EPOCH"))) -- bah
+
function lpdf.pdftimestamp(str)
local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm)
end
- function lpdf.id()
- return format("%s.%s",tex.jobname,timestamp)
+ function lpdf.id(nodate)
+ if nodate then
+ return tex.jobname
+ else
+ return format("%s.%s",tex.jobname,timestamp)
+ end
end
end
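
The new lpdf.settime freezes the timestamp used for the date fields and for lpdf.id, and the call right after its definition already feeds it start_time or SOURCE_DATE_EPOCH when one of those is set. A hedged usage sketch, with 2016-05-08 as an arbitrary example value:

    -- freeze the PDF date fields to 2016-05-08T00:00:00Z so reruns stay identical
    lpdf.settime(1462665600)

    -- or set the variable outside Lua so the resolver above picks it up:
    --   SOURCE_DATE_EPOCH=1462665600 context myfile.tex
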
diff --git a/Master/texmf-dist/tex/context/base/mkiv/lpdf-xmp.lua b/Master/texmf-dist/tex/context/base/mkiv/lpdf-xmp.lua
index b8170319c11..c8b86d38464 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/lpdf-xmp.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/lpdf-xmp.lua
@@ -41,7 +41,7 @@ local mapping = {
["ConTeXt.Time"] = { "date", "rdf:Description/pdfx:ConTeXt.Time" },
["ConTeXt.Url"] = { "context", "rdf:Description/pdfx:ConTeXt.Url" },
["ConTeXt.Version"] = { "context", "rdf:Description/pdfx:ConTeXt.Version" },
- ["ID"] = { "date", "rdf:Description/pdfx:ID" }, -- has date
+ ["ID"] = { "id", "rdf:Description/pdfx:ID" }, -- has date
["PTEX.Fullbanner"] = { "metadata","rdf:Description/pdfx:PTEX.Fullbanner" },
-- Adobe PDF schema
["Keywords"] = { "metadata","rdf:Description/pdf:Keywords" },
@@ -60,8 +60,8 @@ local mapping = {
["ModDate"] = { "date", "rdf:Description/xmp:ModDate" }, -- dummy
["ModifyDate"] = { "date", "rdf:Description/xmp:ModifyDate" },
-- XMP Media Management schema
- ["DocumentID"] = { "date", "rdf:Description/xmpMM:DocumentID" }, -- uuid
- ["InstanceID"] = { "date", "rdf:Description/xmpMM:InstanceID" }, -- uuid
+ ["DocumentID"] = { "id", "rdf:Description/xmpMM:DocumentID" }, -- uuid
+ ["InstanceID"] = { "id", "rdf:Description/xmpMM:InstanceID" }, -- uuid
["RenditionClass"] = { "pdf", "rdf:Description/xmpMM:RenditionClass" }, -- PDF/X-4
["VersionID"] = { "pdf", "rdf:Description/xmpMM:VersionID" }, -- PDF/X-4
-- additional entries
@@ -105,32 +105,66 @@ local included = table.setmetatableindex( {
return true
end)
-directives.register("backend.nodates", function(v)
- included.date = not v
+function lpdf.settrailerid(v)
if v then
- report_info("no date/time information will be added to the PDF file")
- end
-end)
-
-directives.register("backend.trailerid", function(v)
- if v then
- if toboolean(v) or v == "" then
+ local b = toboolean(v) or v == ""
+ if b then
v = "This file is processed by ConTeXt and LuaTeX."
else
v = tostring(v)
end
local h = md5.HEX(v)
- report_info("using hashed trailer id %a (%a)",v,h)
+ if b then
+ report_info("using frozen trailer id")
+ else
+ report_info("using hashed trailer id %a (%a)",v,h)
+ end
pdf.settrailerid(format("[<%s> <%s>]",h,h))
end
-end)
+end
+
+function lpdf.setdates(v)
+ local t = type(v)
+ if t == "number" or t == "string" then
+ t = converters.totime(v)
+ if t then
+ included.date = true
+ included.id = "fake"
+ report_info("forced date/time information %a will be used",lpdf.settime(t))
+ lpdf.settrailerid(false)
+ return
+ end
+ end
+ v = toboolean(v)
+ included.date = v
+ if v then
+ included.id = true
+ else
+ report_info("no date/time but fake id information will be added")
+ lpdf.settrailerid(true)
+ included.id = "fake"
+        -- maybe: lpdf.settime(231631200) -- 1977-05-05 % first entry of knuth about tex mentioned in DT
+ end
+end
+
+function lpdf.id() -- overload of ini
+ local banner = tex.jobname
+ if included.date then
+ return format("%s.%s",banner,lpdf.timestamp())
+ else
+ return banner
+ end
+end
+
+directives.register("backend.trailerid", lpdf.settrailerid)
+directives.register("backend.date", lpdf.setdates)
local function permitdetail(what)
local m = mapping[what]
if m then
return included[m[1]] and m[2]
else
- return included[what]
+ return included[what] and true or false
end
end
@@ -214,22 +248,34 @@ end
-- flushing
-local t = { } for i=1,24 do t[i] = random() end
+local function randomstring(n)
+ local t = { }
+ for i=1,n do
+ t[i] = char(96 + random(26))
+ end
+ return concat(t)
+end
+
+randomstring(26) -- kind of initializes and kicks off random
local function flushxmpinfo()
commands.pushrandomseed()
commands.setrandomseed(os.time())
- local t = { } for i=1,24 do t[i] = char(96 + random(26)) end
- local packetid = concat(t)
-
- local documentid = format("uuid:%s",os.uuid())
- local instanceid = format("uuid:%s",os.uuid())
+ local packetid = "no unique packet id here" -- 24 chars
+ local documentid = "no unique document id here"
+ local instanceid = "no unique instance id here"
local producer = format("LuaTeX-%0.2f.%s",status.luatex_version/100,status.luatex_revision)
local creator = "LuaTeX + ConTeXt MkIV"
local time = lpdf.timestamp()
local fullbanner = status.banner
+ if included.id ~= "fake" then
+ packetid = randomstring(24)
+        documentid = format("uuid:%s",os.uuid())
+        instanceid = format("uuid:%s",os.uuid())
+ end
+
pdfaddxmpinfo("DocumentID", documentid)
pdfaddxmpinfo("InstanceID", instanceid)
pdfaddxmpinfo("Producer", producer)
diff --git a/Master/texmf-dist/tex/context/base/mkiv/luat-cnf.lua b/Master/texmf-dist/tex/context/base/mkiv/luat-cnf.lua
index 83622ef5399..9d37df7bb10 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/luat-cnf.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/luat-cnf.lua
@@ -42,6 +42,7 @@ luatex = luatex or { }
texconfig.kpse_init = false
texconfig.shell_escape = 't'
+---------.start_time = tonumber(os.getenv("SOURCE_DATE_EPOCH")) -- not used in context
-- as soon as possible
diff --git a/Master/texmf-dist/tex/context/base/mkiv/m-oldotf.mkiv b/Master/texmf-dist/tex/context/base/mkiv/m-oldotf.mkiv
index c7c468d93ec..313f9f48459 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/m-oldotf.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/m-oldotf.mkiv
@@ -25,6 +25,7 @@
"font-otc",
"font-oth",
"font-odv",
+ "font-one",
"font-map",
"font-fbk",
"font-gds",
diff --git a/Master/texmf-dist/tex/context/base/mkiv/meta-ini.mkiv b/Master/texmf-dist/tex/context/base/mkiv/meta-ini.mkiv
index 9f53aea69f6..d0fff83dfd6 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/meta-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/meta-ini.mkiv
@@ -486,6 +486,10 @@
\startMPextensions
string contextversion;
contextversion:="\contextversion"; % expanded
+ minute:=\the\normaltime mod 60;
+ hour:=\the\normaltime div 60;
+ year:=\the\normalyear;
+ month:=\the\normalmonth;
\stopMPextensions
%D \macros
diff --git a/Master/texmf-dist/tex/context/base/mkiv/mult-sys.mkiv b/Master/texmf-dist/tex/context/base/mkiv/mult-sys.mkiv
index 01a1674dced..88e956d66e9 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/mult-sys.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/mult-sys.mkiv
@@ -48,7 +48,7 @@
\definesystemconstant {danish} \definesystemconstant {da}
\definesystemconstant {dutch} \definesystemconstant {nl}
\definesystemconstant {english} \definesystemconstant {en}
-\definesystemconstant {finish} \definesystemconstant {fi}
+\definesystemconstant {finnish} \definesystemconstant {fi}
\definesystemconstant {french} \definesystemconstant {fr}
\definesystemconstant {german} \definesystemconstant {de}
\definesystemconstant {hungarian} \definesystemconstant {hu}
diff --git a/Master/texmf-dist/tex/context/base/mkiv/node-fnt.lua b/Master/texmf-dist/tex/context/base/mkiv/node-fnt.lua
index 7f0d239979a..e77280c372e 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/node-fnt.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/node-fnt.lua
@@ -17,9 +17,12 @@ local trace_characters = false trackers.register("nodes.characters", function(
local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end)
local trace_variants = false trackers.register("nodes.variants", function(v) trace_variants = v end)
-local force_discrun = true directives.register("nodes.discrun", function(v) force_discrun = v end)
-local force_boundaryrun = true directives.register("nodes.boundaryrun", function(v) force_boundaryrun = v end)
-local force_basepass = true directives.register("nodes.basepass", function(v) force_basepass = v end)
+-- bad namespace for directives
+
+local force_discrun = true directives.register("nodes.discrun", function(v) force_discrun = v end)
+local force_boundaryrun = true directives.register("nodes.boundaryrun", function(v) force_boundaryrun = v end)
+local force_basepass = true directives.register("nodes.basepass", function(v) force_basepass = v end)
+local keep_redundant = false directives.register("nodes.keepredundant",function(v) keep_redundant = v end)
local report_fonts = logs.reporter("fonts","processing")
@@ -271,6 +274,12 @@ function handlers.characters(head)
end
end
end
+ elseif keep_redundant then
+ -- go on, can be used for tracing
+ elseif not redundant then
+ redundant = { n }
+ else
+ redundant[#redundant+1] = n
end
end
end
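
The new nodes.keepredundant directive keeps otherwise redundant glyph nodes around so they still show up when tracing a font run. A hedged sketch of toggling it from Lua, assuming the usual directives.enable/disable interface:

    directives.enable ("nodes.keepredundant") -- keep them, for instance while tracing
    directives.disable("nodes.keepredundant") -- default behaviour: redundant glyphs are collected
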
diff --git a/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf b/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf
index 5d4c84ce108..6ea8ed23fd9 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf
+++ b/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf b/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf
index 84aba5a7fd9..cb96b7185ed 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf
+++ b/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/Master/texmf-dist/tex/context/base/mkiv/typo-tal.lua b/Master/texmf-dist/tex/context/base/mkiv/typo-tal.lua
index a67cd0a4b47..21c6794c47a 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/typo-tal.lua
+++ b/Master/texmf-dist/tex/context/base/mkiv/typo-tal.lua
@@ -200,10 +200,10 @@ function characteralign.handler(originalhead,where)
while current do
local char, id = isglyph(current)
if char then
- local font = getfont(current)
- -- local unicode = unicodes[font][char]
- local unicode = fontcharacters[font][char].unicode or char -- ignore tables
- if not unicode then
+ local font = getfont(current)
+ local data = fontcharacters[font][char]
+ local unicode = data and data.unicode or char -- ignore tables
+ if not unicode then -- type(unicode) ~= "number"
-- no unicode so forget about it
elseif unicode == separator then
c = current
diff --git a/Master/texmf-dist/tex/context/base/mkiv/typo-wrp.mkiv b/Master/texmf-dist/tex/context/base/mkiv/typo-wrp.mkiv
index 4b18785bd33..08134905021 100644
--- a/Master/texmf-dist/tex/context/base/mkiv/typo-wrp.mkiv
+++ b/Master/texmf-dist/tex/context/base/mkiv/typo-wrp.mkiv
@@ -44,6 +44,7 @@
\spac_crlf_placeholder
\ifcase\raggedstatus\hfil\or\or\or\hfil\fi
\break
+ \hskip\zeropoint % new so that the next word also hyphenates
\ignorespaces}
\unexpanded\def\spac_crlf_placeholder