From b6a82290dcde74722176ee588102f1c924c9fa89 Mon Sep 17 00:00:00 2001 From: Karl Berry Date: Sun, 15 May 2016 21:07:12 +0000 Subject: context (15may16) git-svn-id: svn://tug.org/texlive/trunk@41152 c570f23f-e606-0410-a88d-b1316a301751 --- .../tex/context/base/context-version.pdf | Bin 4249 -> 4252 bytes .../texmf-dist/tex/context/base/mkiv/cont-new.mkiv | 2 +- .../tex/context/base/mkiv/context-todo.tex | 3 + .../texmf-dist/tex/context/base/mkiv/context.mkiv | 2 +- .../texmf-dist/tex/context/base/mkiv/font-lib.mkvi | 10 +- .../texmf-dist/tex/context/base/mkiv/font-one.lua | 363 +----------------- .../texmf-dist/tex/context/base/mkiv/font-onr.lua | 405 +++++++++++++++++++++ .../texmf-dist/tex/context/base/mkiv/font-syn.lua | 189 +++++----- .../texmf-dist/tex/context/base/mkiv/grph-rul.lua | 9 + .../texmf-dist/tex/context/base/mkiv/node-met.lua | 1 + .../texmf-dist/tex/context/base/mkiv/node-rul.lua | 9 + .../texmf-dist/tex/context/base/mkiv/node-rul.mkiv | 5 +- .../texmf-dist/tex/context/base/mkiv/spac-hor.mkiv | 32 +- .../tex/context/base/mkiv/status-files.pdf | Bin 9270 -> 9214 bytes .../tex/context/base/mkiv/status-lua.pdf | Bin 266494 -> 266627 bytes .../texmf-dist/tex/context/base/mkiv/task-ini.lua | 2 + .../texmf-dist/tex/context/base/mkiv/typo-lin.lua | 22 +- 17 files changed, 564 insertions(+), 490 deletions(-) create mode 100644 Master/texmf-dist/tex/context/base/mkiv/font-onr.lua (limited to 'Master/texmf-dist/tex/context/base') diff --git a/Master/texmf-dist/tex/context/base/context-version.pdf b/Master/texmf-dist/tex/context/base/context-version.pdf index 5e69d71142f..2aacea69dd4 100644 Binary files a/Master/texmf-dist/tex/context/base/context-version.pdf and b/Master/texmf-dist/tex/context/base/context-version.pdf differ diff --git a/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv b/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv index ee9b7de6c9b..65128dabbb7 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv +++ b/Master/texmf-dist/tex/context/base/mkiv/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2016.05.13 16:21} +\newcontextversion{2016.05.14 14:06} %D This file is loaded at runtime, thereby providing an excellent place for %D hacks, patches, extensions and new features. diff --git a/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex b/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex index 39cca9a3a72..9569e5352ff 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex +++ b/Master/texmf-dist/tex/context/base/mkiv/context-todo.tex @@ -12,6 +12,9 @@ \startitem head||tail cleanup in disc nodes (get rid of temp i.e.\ delay till linebreak) \stopitem + \startitem + cleanup passive nodes + \stopitem \startitem optimize some callback resolution (more direct) \stopitem diff --git a/Master/texmf-dist/tex/context/base/mkiv/context.mkiv b/Master/texmf-dist/tex/context/base/mkiv/context.mkiv index bf9d4627078..72718b53cad 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/context.mkiv +++ b/Master/texmf-dist/tex/context/base/mkiv/context.mkiv @@ -39,7 +39,7 @@ %D up and the dependencies are more consistent. 
\edef\contextformat {\jobname} -\edef\contextversion{2016.05.13 16:21} +\edef\contextversion{2016.05.14 14:06} \edef\contextkind {beta} %D For those who want to use this: diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi b/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi index 90de409d138..fd6f70d69a5 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi +++ b/Master/texmf-dist/tex/context/base/mkiv/font-lib.mkvi @@ -32,15 +32,11 @@ \registerctxluafile{font-dsp}{1.001} % ... for this one \registerctxluafile{font-off}{1.001} % the old loader -\registerctxluafile{font-syn}{1.001} - \registerctxluafile{font-tfm}{1.001} \registerctxluafile{font-hsh}{1.001} % hashes used by context \registerctxluafile{font-nod}{1.001} -\registerctxluafile{font-trt}{1.001} - \registerctxluafile{font-oti}{1.001} % otf initialization \registerctxluafile{font-ott}{1.001} % otf tables (first) @@ -57,10 +53,16 @@ % we use otf code for type one +\registerctxluafile{font-onr}{1.001} \registerctxluafile{font-one}{1.001} %registerctxluafile{font-afm}{1.001} \registerctxluafile{font-afk}{1.001} +% name database + +\registerctxluafile{font-syn}{1.001} +\registerctxluafile{font-trt}{1.001} + % so far \registerctxluafile{font-pat}{1.001} % patchers diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-one.lua b/Master/texmf-dist/tex/context/base/mkiv/font-one.lua index 5d54150c527..77f2560f6cb 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/font-one.lua +++ b/Master/texmf-dist/tex/context/base/mkiv/font-one.lua @@ -55,7 +55,7 @@ local otfenhancers = otf.enhancers local afmfeatures = constructors.newfeatures("afm") local registerafmfeature = afmfeatures.register -afm.version = 1.510 -- incrementing this number one up will force a re-cache +afm.version = 1.512 -- incrementing this number one up will force a re-cache afm.cache = containers.define("fonts", "afm", afm.version, true) afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*) @@ -66,357 +66,6 @@ local overloads = fonts.mappings.overloads local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes ---[[ldx-- -

We start with the basic reader which we give a name similar to the built in -TFM and OTF reader.

---ldx]]-- - --- Comment FONTIDENTIFIER LMMATHSYMBOLS10 --- Comment CODINGSCHEME TEX MATH SYMBOLS --- Comment DESIGNSIZE 10.0 pt --- Comment CHECKSUM O 4261307036 --- Comment SPACE 0 plus 0 minus 0 --- Comment QUAD 1000 --- Comment EXTRASPACE 0 --- Comment NUM 676.508 393.732 443.731 --- Comment DENOM 685.951 344.841 --- Comment SUP 412.892 362.892 288.889 --- Comment SUB 150 247.217 --- Comment SUPDROP 386.108 --- Comment SUBDROP 50 --- Comment DELIM 2390 1010 --- Comment AXISHEIGHT 250 - ---[[ldx-- -

We now use a new (unfinished) pfb loader but I see no differences between the old -and new vectors (we actually had one bad vector with the old loader).

---ldx]]-- - -local get_indexes - -do - - local n, m - - local progress = function(str,position,name,size) - local forward = position + tonumber(size) + 3 + 2 - n = n + 1 - if n >= m then - return #str, name - elseif forward < #str then - return forward, name - else - return #str, name - end - end - - local initialize = function(str,position,size) - n = 0 - m = tonumber(size) - return position + 1 - end - - local charstrings = P("/CharStrings") - local name = P("/") * C((R("az")+R("AZ")+R("09")+S("-_."))^1) - local size = C(R("09")^1) - local spaces = P(" ")^1 - - local p_filternames = Ct ( - (1-charstrings)^0 * charstrings * spaces * Cmt(size,initialize) - * (Cmt(name * P(" ")^1 * C(R("09")^1), progress) + P(1))^1 - ) - - -- if one of first 4 not 0-9A-F then binary else hex - - local decrypt - - do - - local r, c1, c2, n = 0, 0, 0, 0 - - local function step(c) - local cipher = byte(c) - local plain = bxor(cipher,rshift(r,8)) - r = ((cipher + r) * c1 + c2) % 65536 - return char(plain) - end - - decrypt = function(binary) - r, c1, c2, n = 55665, 52845, 22719, 4 - binary = gsub(binary,".",step) - return sub(binary,n+1) - end - - -- local pattern = Cs((P(1) / step)^1) - -- - -- decrypt = function(binary) - -- r, c1, c2, n = 55665, 52845, 22719, 4 - -- binary = lpegmatch(pattern,binary) - -- return sub(binary,n+1) - -- end - - end - - local function loadpfbvector(filename) - -- for the moment limited to encoding only - - local data = io.loaddata(resolvers.findfile(filename)) - - if not find(data,"!PS%-AdobeFont%-") then - print("no font",filename) - return - end - - if not data then - print("no data",filename) - return - end - - local ascii, binary = match(data,"(.*)eexec%s+......(.*)") - - if not binary then - print("no binary",filename) - return - end - - binary = decrypt(binary,4) - - local vector = lpegmatch(p_filternames,binary) - - vector[0] = table.remove(vector,1) - - if not vector then - print("no vector",filename) - return - end - - return vector - - end - - get_indexes = function(data,pfbname) - local vector = loadpfbvector(pfbname) - if vector then - local characters = data.characters - if trace_loading then - report_afm("getting index data from %a",pfbname) - end - for index=1,#vector do - local name = vector[index] - local char = characters[name] - if char then - if trace_indexing then - report_afm("glyph %a has index %a",name,index) - end - char.index = index - end - end - end - end - -end - ---[[ldx-- -

We start with the basic reader which we give a name similar to the built in -TFM and OTF reader.

---ldx]]-- - --- Comment FONTIDENTIFIER LMMATHSYMBOLS10 --- Comment CODINGSCHEME TEX MATH SYMBOLS --- Comment DESIGNSIZE 10.0 pt --- Comment CHECKSUM O 4261307036 --- Comment SPACE 0 plus 0 minus 0 --- Comment QUAD 1000 --- Comment EXTRASPACE 0 --- Comment NUM 676.508 393.732 443.731 --- Comment DENOM 685.951 344.841 --- Comment SUP 412.892 362.892 288.889 --- Comment SUB 150 247.217 --- Comment SUPDROP 386.108 --- Comment SUBDROP 50 --- Comment DELIM 2390 1010 --- Comment AXISHEIGHT 250 --- Comment DesignSize 12 (pts) --- Comment TFM designsize: 12 (in points) - -local readafm - -do -- no need for a further speedup with locals - - local spacing = patterns.spacer - local lineend = patterns.newline - local number = spacing * (R("09") + S("."))^1 / tonumber - local name = spacing * C((1-spacing)^1) - local words = spacing * (1 - lineend)^1 / strip - local rest = (1 - lineend)^0 - local fontdata = Carg(1) - local semicolon = spacing * P(";") - local plus = P("plus") * number - local minus = P("minus") * number - - -- kern pairs - - local function addkernpair(data,one,two,value) - local chr = data.characters[one] - if chr then - local kerns = chr.kerns - if kerns then - kerns[two] = tonumber(value) - else - chr.kerns = { [two] = tonumber(value) } - end - end - end - - local p_kernpair = (fontdata * P("KPX") * name * name * number) / addkernpair - - -- char metrics - - local chr = false - local ind = 0 - - local function start() - ind = 0 - chr = { } - end - - local function stop() - ind = 0 - chr = false - end - - local function setindex(i) - if i < 0 then - ind = ind + 1 -- ? - else - ind = i - end - chr = { - index = ind - } - end - - local function setwidth(width) - chr.width = width - end - - local function setname(data,name) - data.characters[name] = chr - end - - local function setboundingbox(boundingbox) - chr.boundingbox = boundingbox - end - - local function setligature(plus,becomes) - local ligatures = chr.ligatures - if ligatures then - ligatures[plus] = becomes - else - chr.ligatures = { [plus] = becomes } - end - end - - local p_charmetric = ( ( - P("C") * number / setindex - + P("WX") * number / setwidth - + P("N") * fontdata * name / setname - + P("B") * Ct((number)^4) / setboundingbox - + P("L") * (name)^2 / setligature - ) * semicolon )^1 - - local p_charmetrics = P("StartCharMetrics") * number * (p_charmetric + (1-P("EndCharMetrics")))^0 * P("EndCharMetrics") - local p_kernpairs = P("StartKernPairs") * number * (p_kernpair + (1-P("EndKernPairs")) )^0 * P("EndKernPairs") - - local function set_1(data,key,a) data.metadata[lower(key)] = a end - local function set_2(data,key,a,b) data.metadata[lower(key)] = { a, b } end - local function set_3(data,key,a,b,c) data.metadata[lower(key)] = { a, b, c } end - - local p_parameters = P(false) - + P("FontName") * fontdata * words / function(data,line) - data.metadata.fontname = line - data.metadata.fullname = line - end - + P("ItalicAngle") * fontdata * number / function(data,angle) - data.metadata.italicangle = angle - end - + P("IsFixedPitch") * fontdata * name / function(data,pitch) - data.metadata.monospaced = toboolean(pitch,true) - end - + P("CharWidth") * fontdata * number / function(data,width) - data.metadata.charwidth = width - end - + P("XHeight") * fontdata * number / function(data,xheight) - data.metadata.xheight = xheight - end - + P("Descender") * fontdata * number / function(data,descender) - data.metadata.descender = descender - end - + P("Ascender") * fontdata * number / function(data,ascender) - data.metadata.ascender = 
ascender - end - + P("Comment") * spacing * ( P(false) - + (fontdata * C("DESIGNSIZE") * number * rest) / set_1 -- 1 - + (fontdata * C("TFM designsize") * number * rest) / set_1 - + (fontdata * C("DesignSize") * number * rest) / set_1 - + (fontdata * C("CODINGSCHEME") * words * rest) / set_1 -- - + (fontdata * C("CHECKSUM") * number * words * rest) / set_1 -- 2 - + (fontdata * C("SPACE") * number * plus * minus * rest) / set_3 -- 3 4 5 - + (fontdata * C("QUAD") * number * rest) / set_1 -- 6 - + (fontdata * C("EXTRASPACE") * number * rest) / set_1 -- 7 - + (fontdata * C("NUM") * number * number * number * rest) / set_3 -- 8 9 10 - + (fontdata * C("DENOM") * number * number * rest) / set_2 -- 11 12 - + (fontdata * C("SUP") * number * number * number * rest) / set_3 -- 13 14 15 - + (fontdata * C("SUB") * number * number * rest) / set_2 -- 16 17 - + (fontdata * C("SUPDROP") * number * rest) / set_1 -- 18 - + (fontdata * C("SUBDROP") * number * rest) / set_1 -- 19 - + (fontdata * C("DELIM") * number * number * rest) / set_2 -- 20 21 - + (fontdata * C("AXISHEIGHT") * number * rest) / set_1 -- 22 - ) - - local parser = ( P("StartFontMetrics") / start ) - * ( p_charmetrics + p_kernpairs + p_parameters + (1-P("EndFontMetrics")) )^0 - * ( P("EndFontMetrics") / stop ) - - readafm = function(filename) - local ok, afmblob, size = resolvers.loadbinfile(filename) -- has logging - if ok and afmblob then - local data = { - resources = { - filename = resolvers.unresolve(filename), - version = afm.version, - creator = "context mkiv", - }, - properties = { - hasitalics = false, - }, - goodies = { - }, - metadata = { - filename = file.removesuffix(file.basename(filename)) - }, - characters = { - -- a temporary store - }, - descriptions = { - -- the final store - }, - } - if trace_loading then - report_afm("parsing afm file %a",filename) - end - lpegmatch(parser,afmblob,1,data) - return data - else - if trace_loading then - report_afm("no valid afm file %a",filename) - end - return nil - end - end - -end - --[[ldx--

We cache files. Caching is taken care of in the loader. We cheat a bit by adding ligatures and kern information to the afm derived data. That way we can set them faster @@ -477,16 +126,8 @@ function afm.load(filename) end if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then report_afm("reading %a",filename) - data = readafm(filename) + data = afm.readers.loadfont(filename,pfbname) if data then - if pfbname ~= "" then - data.resources.filename = resolvers.unresolve(pfbname) - get_indexes(data,pfbname) - elseif trace_loading then - report_afm("no pfb file for %a",filename) - -- data.resources.filename = "unset" -- better than loading the afm file - end - -- we now have all the data loaded applyenhancers(data,filename) -- otfreaders.addunicodetable(data) -- only when not done yet fonts.mappings.addtounicode(data,filename) diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-onr.lua b/Master/texmf-dist/tex/context/base/mkiv/font-onr.lua new file mode 100644 index 00000000000..2699f25bbfd --- /dev/null +++ b/Master/texmf-dist/tex/context/base/mkiv/font-onr.lua @@ -0,0 +1,405 @@ +if not modules then modules = { } end modules ['font-onr'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

Some code may look a bit obscure but this has to do with the fact that we also use +this code for testing and much code evolved in the transition from TFM to +AFM to OTF.

+ +

The following code still has traces of intermediate font support where we handled +font encodings. Eventually font encoding went away but we kept some code around in +other modules.

+ +

This version implements a node mode approach so that users can also more easily +add features.

+--ldx]]-- + +local fonts, logs, trackers, resolvers = fonts, logs, trackers, resolvers + +local next, type, tonumber, rawget = next, type, tonumber, rawget +local match, lower, gsub, strip, find = string.match, string.lower, string.gsub, string.strip, string.find +local char, byte, sub = string.char, string.byte, string.sub +local abs = math.abs +local bxor, rshift = bit32.bxor, bit32.rshift +local P, S, R, Cmt, C, Ct, Cs, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.Cmt, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg +local lpegmatch, patterns = lpeg.match, lpeg.patterns + +local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end) +local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end) + +local report_afm = logs.reporter("fonts","afm loading") + +fonts = fonts or { } +local handlers = fonts.handlers or { } +fonts.handlers = handlers +local afm = handlers.afm or { } +handlers.afm = afm +local readers = afm.readers or { } +afm.readers = readers + +afm.version = 1.512 -- incrementing this number one up will force a re-cache + +--[[ldx-- +

We start with the basic reader which we give a name similar to the built in +TFM and OTF reader.

+

We use a new (unfinished) pfb loader but I see no differences between the old +and new vectors (we actually had one bad vector with the old loader).

+--ldx]]-- + +local get_indexes + +do + + local n, m + + local progress = function(str,position,name,size) + local forward = position + tonumber(size) + 3 + 2 + n = n + 1 + if n >= m then + return #str, name + elseif forward < #str then + return forward, name + else + return #str, name + end + end + + local initialize = function(str,position,size) + n = 0 + m = tonumber(size) + return position + 1 + end + + local charstrings = P("/CharStrings") + local name = P("/") * C((R("az")+R("AZ")+R("09")+S("-_."))^1) + local size = C(R("09")^1) + local spaces = P(" ")^1 + + local p_filternames = Ct ( + (1-charstrings)^0 * charstrings * spaces * Cmt(size,initialize) + * (Cmt(name * P(" ")^1 * C(R("09")^1), progress) + P(1))^1 + ) + + -- if one of first 4 not 0-9A-F then binary else hex + + local decrypt + + do + + local r, c1, c2, n = 0, 0, 0, 0 + + local function step(c) + local cipher = byte(c) + local plain = bxor(cipher,rshift(r,8)) + r = ((cipher + r) * c1 + c2) % 65536 + return char(plain) + end + + decrypt = function(binary) + r, c1, c2, n = 55665, 52845, 22719, 4 + binary = gsub(binary,".",step) + return sub(binary,n+1) + end + + -- local pattern = Cs((P(1) / step)^1) + -- + -- decrypt = function(binary) + -- r, c1, c2, n = 55665, 52845, 22719, 4 + -- binary = lpegmatch(pattern,binary) + -- return sub(binary,n+1) + -- end + + end + + local function loadpfbvector(filename) + -- for the moment limited to encoding only + + local data = io.loaddata(resolvers.findfile(filename)) + + if not data then + print("no data",filename) + return + end + + if not find(data,"!PS%-AdobeFont%-") then + print("no font",filename) + return + end + + local ascii, binary = match(data,"(.*)eexec%s+......(.*)") + + if not binary then + print("no binary",filename) + return + end + + binary = decrypt(binary,4) + + local vector = lpegmatch(p_filternames,binary) + + if vector[1] == ".notdef" then + -- tricky + vector[0] = table.remove(vector,1) + end + + if not vector then + print("no vector",filename) + return + end + + return vector + + end + + get_indexes = function(data,pfbname) + local vector = loadpfbvector(pfbname) + if vector then + local characters = data.characters + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index=1,#vector do + local name = vector[index] + local char = characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index = index + end + end + end + end + +end + +--[[ldx-- +

We start with the basic reader which we give a name similar to the built in +TFM and OTF reader. We only need data that is relevant for our use. We don't support +more complex arrangements like multiple master (obsolete), direction specific kerning, etc.

+--ldx]]-- + +local spacing = patterns.whitespace +local lineend = patterns.newline +local number = spacing * S("+-")^-1 * (R("09") + S("."))^1 / tonumber +local name = spacing * C((1-spacing)^1) +local words = spacing * (1 - lineend)^1 / strip +local rest = (1 - lineend)^0 +local fontdata = Carg(1) +local semicolon = spacing * P(";") +local plus = P("plus") * number +local minus = P("minus") * number + +-- kern pairs + +local function addkernpair(data,one,two,value) + local chr = data.characters[one] + if chr then + local kerns = chr.kerns + if kerns then + kerns[two] = tonumber(value) + else + chr.kerns = { [two] = tonumber(value) } + end + end +end + +local p_kernpair = (fontdata * P("KPX") * name * name * number) / addkernpair + +-- char metrics + +local chr = false +local ind = 0 + +local function start(data,version) + data.metadata.afmversion = version + ind = 0 + chr = { } +end + +local function stop() + ind = 0 + chr = false +end + +local function setindex(i) + if i < 0 then + ind = ind + 1 -- ? + else + ind = i + end + chr = { + index = ind + } +end + +local function setwidth(width) + chr.width = width +end + +local function setname(data,name) + data.characters[name] = chr +end + +local function setboundingbox(boundingbox) + chr.boundingbox = boundingbox +end + +local function setligature(plus,becomes) + local ligatures = chr.ligatures + if ligatures then + ligatures[plus] = becomes + else + chr.ligatures = { [plus] = becomes } + end +end + +local p_charmetric = ( ( + P("C") * number / setindex + + P("WX") * number / setwidth + + P("N") * fontdata * name / setname + + P("B") * Ct((number)^4) / setboundingbox + + P("L") * (name)^2 / setligature + ) * semicolon )^1 + +local p_charmetrics = P("StartCharMetrics") * number * (p_charmetric + (1-P("EndCharMetrics")))^0 * P("EndCharMetrics") +local p_kernpairs = P("StartKernPairs") * number * (p_kernpair + (1-P("EndKernPairs" )))^0 * P("EndKernPairs" ) + +local function set_1(data,key,a) data.metadata[lower(key)] = a end +local function set_2(data,key,a,b) data.metadata[lower(key)] = { a, b } end +local function set_3(data,key,a,b,c) data.metadata[lower(key)] = { a, b, c } end + +-- Notice string +-- EncodingScheme string +-- MappingScheme integer +-- EscChar integer +-- CharacterSet string +-- Characters integer +-- IsBaseFont boolean +-- VVector number number +-- IsFixedV boolean + +local p_parameters = P(false) + + fontdata + * ((P("FontName") + P("FullName") + P("FamilyName"))/lower) + * words / function(data,key,value) + data.metadata[key] = value + end + + fontdata + * ((P("Weight") + P("Version"))/lower) + * name / function(data,key,value) + data.metadata[key] = value + end + + fontdata + * P("IsFixedPitch") + * name / function(data,pitch) + data.metadata.monospaced = toboolean(pitch,true) + end + + fontdata + * P("FontBBox") + * Ct(number^4) / function(data,boundingbox) + data.metadata.boundingbox = boundingbox + end + + fontdata + * ((P("CharWidth") + P("CapHeight") + P("XHeight") + P("Descender") + P("Ascender") + P("ItalicAngle"))/lower) + * number / function(data,key,value) + data.metadata[key] = value + end + + P("Comment") * spacing * ( P(false) + + (fontdata * C("DESIGNSIZE") * number * rest) / set_1 -- 1 + + (fontdata * C("TFM designsize") * number * rest) / set_1 + + (fontdata * C("DesignSize") * number * rest) / set_1 + + (fontdata * C("CODINGSCHEME") * words * rest) / set_1 -- + + (fontdata * C("CHECKSUM") * number * words * rest) / set_1 -- 2 + + (fontdata * C("SPACE") * number * plus * minus * rest) / set_3 -- 3 4 5 + 
+ (fontdata * C("QUAD") * number * rest) / set_1 -- 6 + + (fontdata * C("EXTRASPACE") * number * rest) / set_1 -- 7 + + (fontdata * C("NUM") * number * number * number * rest) / set_3 -- 8 9 10 + + (fontdata * C("DENOM") * number * number * rest) / set_2 -- 11 12 + + (fontdata * C("SUP") * number * number * number * rest) / set_3 -- 13 14 15 + + (fontdata * C("SUB") * number * number * rest) / set_2 -- 16 17 + + (fontdata * C("SUPDROP") * number * rest) / set_1 -- 18 + + (fontdata * C("SUBDROP") * number * rest) / set_1 -- 19 + + (fontdata * C("DELIM") * number * number * rest) / set_2 -- 20 21 + + (fontdata * C("AXISHEIGHT") * number * rest) / set_1 -- 22 + ) + +local fullparser = ( P("StartFontMetrics") * fontdata * name / start ) + * ( p_charmetrics + p_kernpairs + p_parameters + (1-P("EndFontMetrics")) )^0 + * ( P("EndFontMetrics") / stop ) + +local infoparser = ( P("StartFontMetrics") * fontdata * name / start ) + * ( p_parameters + (1-P("EndFontMetrics")) )^0 + * ( P("EndFontMetrics") / stop ) + +-- infoparser = ( P("StartFontMetrics") * fontdata * name / start ) +-- * ( p_parameters + (1-P("EndFontMetrics") - P("StartCharMetrics")) )^0 +-- * ( (P("EndFontMetrics") + P("StartCharMetrics")) / stop ) + +local function read(filename,parser) + local afmblob = io.loaddata(filename) + if afmblob then + local data = { + resources = { + filename = resolvers.unresolve(filename), + version = afm.version, + creator = "context mkiv", + }, + properties = { + hasitalics = false, + }, + goodies = { + }, + metadata = { + filename = file.removesuffix(file.basename(filename)) + }, + characters = { + -- a temporary store + }, + descriptions = { + -- the final store + }, + } + if trace_loading then + report_afm("parsing afm file %a",filename) + end + lpegmatch(parser,afmblob,1,data) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end + +function readers.loadfont(afmname,pfbname) + local data = read(resolvers.findfile(afmname),fullparser) + if data then + if not pfbname or pfbname == "" then + pfbname = file.replacesuffix(file.nameonly(afmname),"pfb") + pfbname = resolvers.findfile(pfbname) + end + if pfbname and pfbname ~= "" then + data.resources.filename = resolvers.unresolve(pfbname) + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",afmname) + -- data.resources.filename = "unset" -- better than loading the afm file + end + return data + end +end + +function readers.getinfo(filename) + local data = read(resolvers.findfile(filename),infoparser) + if data then + return data.metadata + end +end + diff --git a/Master/texmf-dist/tex/context/base/mkiv/font-syn.lua b/Master/texmf-dist/tex/context/base/mkiv/font-syn.lua index f27995212bb..ef0d44a843e 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/font-syn.lua +++ b/Master/texmf-dist/tex/context/base/mkiv/font-syn.lua @@ -308,106 +308,107 @@ end but to keep the overview, we define them here.

--ldx]]-- +filters.afm = fonts.handlers.afm.readers.getinfo filters.otf = fonts.handlers.otf.readers.getinfo filters.ttf = filters.otf filters.ttc = filters.otf -------.ttx = filters.otf -local function normalize(t) -- only for afm parsing - local boundingbox = t.fontbbox - if boundingbox then - for i=1,#boundingbox do - boundingbox[i] = tonumber(boundingbox[i]) - end - else - boundingbox = { 0, 0, 0, 0 } - end - return { - copyright = t.copyright, - fontname = t.fontname, - fullname = t.fullname, - familyname = t.familyname, - weight = t.weight, - widtht = t.width, - italicangle = tonumber(t.italicangle) or 0, - monospaced = toboolean(t.isfixedpitch) or false, - boundingbox = boundingbox, - version = t.version, -- not used - capheight = tonumber(t.capheight), - xheight = tonumber(t.xheight), - ascender = tonumber(t.ascender), - descender = tonumber(t.descender), - } -end - -local p_spaces = lpegpatterns.whitespace -local p_number = (R("09")+S(".-+"))^1 / tonumber -local p_boolean = P("false") * Cc(false) - + P("false") * Cc(false) -local p_string = P("(") * C((lpegpatterns.nestedparents + 1 - P(")"))^1) * P(")") -local p_array = P("[") * Ct((p_number + p_boolean + p_string + p_spaces^1)^1) * P("]") - + P("{") * Ct((p_number + p_boolean + p_string + p_spaces^1)^1) * P("}") - -local p_key = P("/") * C(R("AZ","az")^1) -local p_value = p_string - + p_number - + p_boolean - + p_array - -local p_entry = p_key * p_spaces^0 * p_value - -function filters.afm(name) - -- we could parse the afm file as well, and then report an error but - -- it's not worth the trouble - local pfbname = findfile(removesuffix(name)..".pfb","pfb") or "" - if pfbname == "" then - pfbname = findfile(nameonly(name)..".pfb","pfb") or "" - end - if pfbname ~= "" then - local f = io.open(name) - if f then - local hash = { } - local okay = false - for line in f:lines() do -- slow but only a few lines at the beginning - if find(line,"StartCharMetrics",1,true) then - break - else - local key, value = match(line,"^(.+)%s+(.+)%s*$") - if key and #key > 0 then - hash[lower(key)] = value - end - end - end - f:close() - return normalize(hash) - end - end - return nil, "no matching pfb file" -end +-- local function normalize(t) -- only for afm parsing +-- local boundingbox = t.boundingbox or t.fontbbox +-- if boundingbox then +-- for i=1,#boundingbox do +-- boundingbox[i] = tonumber(boundingbox[i]) +-- end +-- else +-- boundingbox = { 0, 0, 0, 0 } +-- end +-- return { +-- copyright = t.copyright, +-- fontname = t.fontname, +-- fullname = t.fullname, +-- familyname = t.familyname, +-- weight = t.weight, +-- widtht = t.width, +-- italicangle = tonumber(t.italicangle) or 0, +-- monospaced = t.monospaced or toboolean(t.isfixedpitch) or false, +-- boundingbox = boundingbox, +-- version = t.version, -- not used +-- capheight = tonumber(t.capheight), +-- xheight = tonumber(t.xheight), +-- ascender = tonumber(t.ascender), +-- descender = tonumber(t.descender), +-- } +-- end +-- +-- function filters.afm(name) +-- -- we could parse the afm file as well, and then report an error but +-- -- it's not worth the trouble +-- local pfbname = findfile(removesuffix(name)..".pfb","pfb") or "" +-- if pfbname == "" then +-- pfbname = findfile(nameonly(name)..".pfb","pfb") or "" +-- end +-- if pfbname ~= "" then +-- local f = io.open(name) +-- if f then +-- local hash = { } +-- local okay = false +-- for line in f:lines() do -- slow but only a few lines at the beginning +-- if find(line,"StartCharMetrics",1,true) then +-- break +-- else +-- local key, value 
= match(line,"^(.+)%s+(.+)%s*$") +-- if key and #key > 0 then +-- hash[lower(key)] = value +-- end +-- end +-- end +-- f:close() +-- return normalize(hash) +-- end +-- end +-- return nil, "no matching pfb file" +-- end -function filters.pfb(name) - local f = io.open(name) - if f then - local hash = { } - local okay = false - for line in f:lines() do -- slow but only a few lines at the beginning - if find(line,"dict begin") then - okay = true - elseif not okay then - -- go on - elseif find(line,"currentdict end") then - break - else - local key, value = lpegmatch(p_entry,line) - if key and value then - hash[lower(key)] = value - end - end - end - f:close() - return normalize(hash) - end -end +-- local p_spaces = lpegpatterns.whitespace +-- local p_number = (R("09")+S(".-+"))^1 / tonumber +-- local p_boolean = P("false") * Cc(false) +-- + P("false") * Cc(false) +-- local p_string = P("(") * C((lpegpatterns.nestedparents + 1 - P(")"))^1) * P(")") +-- local p_array = P("[") * Ct((p_number + p_boolean + p_string + p_spaces^1)^1) * P("]") +-- + P("{") * Ct((p_number + p_boolean + p_string + p_spaces^1)^1) * P("}") +-- +-- local p_key = P("/") * C(R("AZ","az")^1) +-- local p_value = p_string +-- + p_number +-- + p_boolean +-- + p_array +-- +-- local p_entry = p_key * p_spaces^0 * p_value +-- +-- function filters.pfb(name) +-- local f = io.open(name) +-- if f then +-- local hash = { } +-- local okay = false +-- for line in f:lines() do -- slow but only a few lines at the beginning +-- if find(line,"dict begin") then +-- okay = true +-- elseif not okay then +-- -- go on +-- elseif find(line,"currentdict end") then +-- break +-- else +-- local key, value = lpegmatch(p_entry,line) +-- if key and value then +-- hash[lower(key)] = value +-- end +-- end +-- end +-- f:close() +-- return normalize(hash) +-- end +-- end --[[ldx--

The scanner loops over the filters using the information stored in diff --git a/Master/texmf-dist/tex/context/base/mkiv/grph-rul.lua b/Master/texmf-dist/tex/context/base/mkiv/grph-rul.lua index 5d3cb8ad5d1..e5aa053963d 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/grph-rul.lua +++ b/Master/texmf-dist/tex/context/base/mkiv/grph-rul.lua @@ -23,6 +23,10 @@ local a_colorspace = attributes.private('colormodel') local mpcolor = attributes.colors.mpcolor +local trace_mp = false trackers.register("rules.mp", function(v) trace_mp = v end) + +local report_mp = logs.reporter("rules","mp") + local floor = math.floor local random = math.random @@ -65,6 +69,7 @@ FakeWord(%width%,%height%,%depth%,%line%,%color%); FakeRule(%width%,%height%,%depth%,%line%,%color%); ]], ["fake:rest"] = replacer [[ +RuleOption := "%option%" ; RuleWidth := %width% ; RuleHeight := %height% ; RuleDepth := %depth% ; @@ -87,8 +92,12 @@ def RuleColor = %color% enddef ; offset = p.offset or 0, line = (p.line or 65536) * bpfactor, color = mpcolor(p.ma,p.ca,p.ta), + option = p.option or "", } local m = cache[code] + if trace_mp then + report_mp(m) + end if m and m ~= "" then pdfprint("direct",m) end diff --git a/Master/texmf-dist/tex/context/base/mkiv/node-met.lua b/Master/texmf-dist/tex/context/base/mkiv/node-met.lua index 272b1d1769a..432ecd1ec94 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/node-met.lua +++ b/Master/texmf-dist/tex/context/base/mkiv/node-met.lua @@ -91,6 +91,7 @@ nodes.slide = node.slide nodes.vpack = node.vpack nodes.fields = node.fields nodes.is_node = node.is_node +nodes.setglue = node.setglue nodes.first_glyph = node.first_glyph nodes.has_glyph = node.has_glyph or node.first_glyph diff --git a/Master/texmf-dist/tex/context/base/mkiv/node-rul.lua b/Master/texmf-dist/tex/context/base/mkiv/node-rul.lua index 5ac43ea4ac9..a1ac67657a6 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/node-rul.lua +++ b/Master/texmf-dist/tex/context/base/mkiv/node-rul.lua @@ -490,6 +490,15 @@ function nodes.shifts.enable() tasks.enableaction("shipouts","nodes.shifts.handler") end +-- linefillers (placeholder) + +nodes.linefillers = nodes.linefillers or { } +nodes.linefillers.data = nodes.linefillers.data or { } + +function nodes.linefillers.handler(head) + return head, false +end + -- interface local implement = interfaces.implement diff --git a/Master/texmf-dist/tex/context/base/mkiv/node-rul.mkiv b/Master/texmf-dist/tex/context/base/mkiv/node-rul.mkiv index 8706b8ecf39..8c7f1d08bd4 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/node-rul.mkiv +++ b/Master/texmf-dist/tex/context/base/mkiv/node-rul.mkiv @@ -348,8 +348,8 @@ \clf_defineshift continue {\shiftparameter\c!continue}% unit {\shiftparameter\c!unit}% - method \shiftparameter\c!method - dy \shiftparameter\c!dy % number + method \shiftparameter\c!method + dy \shiftparameter\c!dy % number \relax}} \unexpanded\def\node_shifts_redefine#1% @@ -366,6 +366,7 @@ {\def\currentshift{#1}% \expandafter\let\expandafter\c_node_shifts_index\csname\??shiftindex#1\endcsname \advance\c_node_shifts_index\plusone + \clf_enableshifts % will be relaxed \attribute\shiftedattribute\numexpr \plusthousand*\c_node_shifts_index +\csname\??shiftattribute#1\ifcsname\??shift#1:\number\c_node_shifts_index\s!parent\endcsname:\number\c_node_shifts_index\fi\endcsname diff --git a/Master/texmf-dist/tex/context/base/mkiv/spac-hor.mkiv b/Master/texmf-dist/tex/context/base/mkiv/spac-hor.mkiv index 19f2f53b3f8..08e5f6343e8 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/spac-hor.mkiv +++ 
b/Master/texmf-dist/tex/context/base/mkiv/spac-hor.mkiv @@ -631,7 +631,7 @@ \newskip\s_spac_narrower_left \newskip\s_spac_narrower_right -\newskip\s_spac_narrower_mid +\newskip\s_spac_narrower_middle \installcommandhandler \??narrower {narrower} \??narrower @@ -655,21 +655,21 @@ {\ifcsname\??narrowermethod#1\endcsname \lastnamedcs \else - \global\advance\s_spac_narrower_mid#1\relax + \global\advance\s_spac_narrower_middle#1\relax \fi} \def\spac_narrower_initialize[#1]% hm, can be dorepeat directly {\dorepeatwithcommand[#1]\spac_narrower_method_analyze} -\installnarrowermethod \v!left {\global\advance\s_spac_narrower_left \narrowerparameter\c!left \relax} -\installnarrowermethod \v!middle {\global\advance\s_spac_narrower_mid \narrowerparameter\c!middle\relax} -\installnarrowermethod \v!right {\global\advance\s_spac_narrower_right \narrowerparameter\c!right \relax} -\installnarrowermethod{-\v!left }{\global\advance\s_spac_narrower_left -\narrowerparameter\c!left \relax} -\installnarrowermethod{-\v!middle}{\global\advance\s_spac_narrower_mid -\narrowerparameter\c!middle\relax} -\installnarrowermethod{-\v!right }{\global\advance\s_spac_narrower_right-\narrowerparameter\c!right \relax} -\installnarrowermethod \v!reset {\global \s_spac_narrower_left \zeropoint - \global \s_spac_narrower_mid \zeropoint - \global \s_spac_narrower_right \zeropoint\relax} +\installnarrowermethod \v!left {\global\advance\s_spac_narrower_left \narrowerparameter\c!left \relax} +\installnarrowermethod \v!middle {\global\advance\s_spac_narrower_middle \narrowerparameter\c!middle\relax} +\installnarrowermethod \v!right {\global\advance\s_spac_narrower_right \narrowerparameter\c!right \relax} +\installnarrowermethod{-\v!left }{\global\advance\s_spac_narrower_left -\narrowerparameter\c!left \relax} +\installnarrowermethod{-\v!middle}{\global\advance\s_spac_narrower_middle-\narrowerparameter\c!middle\relax} +\installnarrowermethod{-\v!right }{\global\advance\s_spac_narrower_right -\narrowerparameter\c!right \relax} +\installnarrowermethod \v!reset {\global \s_spac_narrower_left \zeropoint + \global \s_spac_narrower_middle \zeropoint + \global \s_spac_narrower_right \zeropoint\relax} \installnarrowermethod \v!none {} \unexpanded\def\spac_narrower_start#1% @@ -686,12 +686,12 @@ \def\spac_narrower_start_apply#1% {\narrowerparameter\c!before - \global\s_spac_narrower_left \zeropoint - \global\s_spac_narrower_right\zeropoint - \global\s_spac_narrower_mid \zeropoint + \global\s_spac_narrower_left \zeropoint + \global\s_spac_narrower_right \zeropoint + \global\s_spac_narrower_middle\zeropoint \processcommalistwithparameters[#1]\spac_narrower_initialize - \advance\leftskip \dimexpr\s_spac_narrower_left +\s_spac_narrower_mid\relax - \advance\rightskip\dimexpr\s_spac_narrower_right+\s_spac_narrower_mid\relax + \advance\leftskip \dimexpr\s_spac_narrower_left +\s_spac_narrower_middle\relax + \advance\rightskip\dimexpr\s_spac_narrower_right+\s_spac_narrower_middle\relax \seteffectivehsize} \unexpanded\def\spac_narrower_stop diff --git a/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf b/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf index 50c9c743acf..c0eaf445aca 100644 Binary files a/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf and b/Master/texmf-dist/tex/context/base/mkiv/status-files.pdf differ diff --git a/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf b/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf index c59d3c52fd2..c4c0c12ace9 100644 Binary files 
a/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf and b/Master/texmf-dist/tex/context/base/mkiv/status-lua.pdf differ diff --git a/Master/texmf-dist/tex/context/base/mkiv/task-ini.lua b/Master/texmf-dist/tex/context/base/mkiv/task-ini.lua index eaf342234de..696a3b4a95e 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/task-ini.lua +++ b/Master/texmf-dist/tex/context/base/mkiv/task-ini.lua @@ -117,6 +117,7 @@ appendaction("finalizers", "lists", "builders.paragraphs.keeptogether") ------------("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo appendaction("finalizers", "fonts", "builders.paragraphs.solutions.splitters.optimize") -- experimental appendaction("finalizers", "lists", "builders.paragraphs.tag") +appendaction("finalizers", "lists", "nodes.linefillers.handler") -- still experimental @@ -191,6 +192,7 @@ disableaction("finalizers", "builders.paragraphs.keeptogether") disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") -------------("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete disableaction("finalizers", "builders.paragraphs.tag") +disableaction("finalizers", "nodes.linefillers.handler") disableaction("math", "noads.handlers.showtree") disableaction("math", "noads.handlers.tags") diff --git a/Master/texmf-dist/tex/context/base/mkiv/typo-lin.lua b/Master/texmf-dist/tex/context/base/mkiv/typo-lin.lua index 5fc26a3547b..a74a635f679 100644 --- a/Master/texmf-dist/tex/context/base/mkiv/typo-lin.lua +++ b/Master/texmf-dist/tex/context/base/mkiv/typo-lin.lua @@ -372,21 +372,21 @@ function paragraphs.moveinline(n,blob,dx,dy) end end -local f_anchor = formatters["_plib_.set('md:h',%i,{x=true,c=true})"] -local s_anchor = 'md:h' +-- local f_anchor = formatters["_plib_.set('md:h',%i,{x=true,c=true})"] +-- local s_anchor = 'md:h' +-- +-- local function setanchor(h_anchor) +-- return new_latelua(f_anchor(h_anchor)) +-- end + +local lateluafunction = nodepool.lateluafunction +local setposition = job.positions.set +local t_anchor = { x = true, c = true } local function setanchor(h_anchor) - return new_latelua(f_anchor(h_anchor)) + return lateluafunction(function() setposition("md:h",h_anchor,t_anchor) end) end --- local lateluafunction = nodepool.lateluafunction --- local setposition = job.positions.set --- local t_anchor = { x = true, c = true } - --- local function setanchor(h_anchor) --- return lateluafunction(function() setposition("md:h",h_anchor,t_anchor) end) --- end - function paragraphs.calculatedelta(n,width,delta,atleft,islocal,followshape,area) local line = type(n) ~= "table" and getprop(n,"line") or n if not line then -- cgit v1.2.3
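
The substance of this commit is the split of the AFM/PFB parsing code out of font-one.lua into the new font-onr.lua, which exposes fonts.handlers.afm.readers.loadfont and fonts.handlers.afm.readers.getinfo: afm.load in font-one.lua now delegates to the former, and font-syn.lua wires filters.afm to the latter. The short Lua sketch below is not part of the patch; it only illustrates, assuming a running ConTeXt MkIV session in which these files are loaded (for example inside \startluacode ... \stopluacode), how the two entry points might be called. The file names somefont.afm and somefont.pfb are placeholders.

-- Hedged usage sketch; assumes a ConTeXt MkIV Lua context where font-onr.lua
-- is loaded. "somefont.afm" / "somefont.pfb" are hypothetical file names.

local readers = fonts.handlers.afm.readers

-- Full load: parses the afm file and, when a pfb companion is found,
-- fills in the glyph indices (readers.loadfont, defined in font-onr.lua).
local data = readers.loadfont("somefont.afm", "somefont.pfb")
if data then
    print(data.metadata.fontname, data.metadata.italicangle)
end

-- Metadata-only scan, the variant the font name database now uses
-- (font-syn.lua sets filters.afm = fonts.handlers.afm.readers.getinfo).
local info = readers.getinfo("somefont.afm")
if info then
    print(info.fullname, info.weight, info.monospaced)
end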