This module is a bit more split up than I'd like but since we also want to test
@@ -170,12 +177,29 @@ registertracker("otf.injections","nodes.injections")
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local flush_node_list = node.flush_list
-local end_of_math = node.end_of_math
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local copy_node = nuts.copy
+local find_node_tail = nuts.tail
+local flush_node_list = nuts.flush_list
+local end_of_math = nuts.end_of_math
local setmetatableindex = table.setmetatableindex
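
The bulk of this patch is a mechanical switch from userdata field access (n.char, n.next, n.components) to the "nuts" accessors imported above: a list is converted once with tonut, traversed and mutated through the get*/set* calls, and eventually handed back as a regular node list (featuresprocessor further down does the tonut conversion; the matching tonode at the end falls outside this excerpt). A minimal sketch of the idiom, for illustration only (countglyphs and glyphid are made-up names, not part of the patch):

local function countglyphs(head) -- head is a regular node list
    local glyphid = node.id("glyph")
    local n = tonut(head)            -- switch to the direct representation once
    local count = 0
    while n do
        if getid(n) == glyphid then  -- was: n.id == glyphid
            count = count + 1
        end
        n = getnext(n)               -- was: n = n.next
    end
    return count
end
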
@@ -187,6 +211,7 @@ local default = "dflt"
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
local glyphcodes = nodes.glyphcodes
+local disccodes = nodes.disccodes
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
@@ -197,6 +222,8 @@ local math_code = nodecodes.math
local dir_code = whatcodes.dir
local localpar_code = whatcodes.localpar
+local discretionary_code = disccodes.discretionary
+
local ligature_code = glyphcodes.ligature
local privateattribute = attributes.private
@@ -208,13 +235,7 @@ local privateattribute = attributes.private
-- of only some.
local a_state = privateattribute('state')
-local a_markbase = privateattribute('markbase')
-local a_markmark = privateattribute('markmark')
-local a_markdone = privateattribute('markdone') -- assigned at the injection end
-local a_cursbase = privateattribute('cursbase')
-local a_curscurs = privateattribute('curscurs')
-local a_cursdone = privateattribute('cursdone')
-local a_kernpair = privateattribute('kernpair')
+local a_cursbase = privateattribute('cursbase') -- to be checked
local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
local injections = nodes.injections
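
Two access mechanisms appear side by side in the rewritten handlers: plain numeric attributes read with getattr (the getattr(start,0) checks that featuresprocessor compares against its attr argument) and node properties read and written with getprop/setprop, now used for a_state and a_ligacomp; properties live on the Lua side and can hold arbitrary values without touching TeX's attribute registers. A small illustrative sketch (somenut is a placeholder name, not from the patch):

local somenut = tonut(node.new("glyph"))
local a = getattr(somenut,0)          -- attribute 0, the value matched against attr
setprop(somenut,a_ligacomp,2)         -- property: any Lua value, attached from Lua
local c = getprop(somenut,a_ligacomp) -- 2
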
@@ -223,9 +244,7 @@ local setcursive = injections.setcursive
local setkern = injections.setkern
local setpair = injections.setpair
-local markonce = true
local cursonce = true
-local kernonce = true
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -328,11 +347,11 @@ end
-- and indices.
local function copy_glyph(g) -- next and prev are untouched !
- local components = g.components
+ local components = getfield(g,"components")
if components then
- g.components = nil
+ setfield(g,"components",nil)
local n = copy_node(g)
- g.components = components
+ setfield(g,"components",components)
return n
else
return copy_node(g)
@@ -342,28 +361,28 @@ end
-- start is a mark and we need to keep that one
local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and start.char == char then
+ if start == stop and getchar(start) == char then
return head, start
else
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base = copy_glyph(start)
if head == start then
head = base
end
- base.char = char
- base.subtype = ligature_code
- base.components = start
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
if prev then
- prev.next = base
+ setfield(prev,"next",base)
end
if next then
- next.prev = base
+ setfield(next,"prev",base)
end
- base.next = next
- base.prev = prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
return head, base
end
end
@@ -376,17 +395,17 @@ end
-- third component.
local function getcomponentindex(start)
- if start.id ~= glyph_code then
+ if getid(start) ~= glyph_code then
return 0
- elseif start.subtype == ligature_code then
+ elseif getsubtype(start) == ligature_code then
local i = 0
- local components = start.components
+ local components = getfield(start,"components")
while components do
i = i + getcomponentindex(components)
- components = components.next
+ components = getnext(components)
end
return i
- elseif not marks[start.char] then
+ elseif not marks[getchar(start)] then
return 1
else
return 0
@@ -396,29 +415,29 @@ end
-- eventually we will do positioning in another way (needs additional w/h/d fields)
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if start == stop and start.char == char then
- start.char = char
+ if start == stop and getchar(start) == char then
+ setfield(start,"char",char)
return head, start
end
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base = copy_glyph(start)
if start == head then
head = base
end
- base.char = char
- base.subtype = ligature_code
- base.components = start -- start can have components
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start) -- start can have components
if prev then
- prev.next = base
+ setfield(prev,"next",base)
end
if next then
- next.prev = base
+ setfield(next,"prev",base)
end
- base.next = next
- base.prev = prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
if not discfound then
local deletemarks = markflag ~= "mark"
local components = start
@@ -428,35 +447,35 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
local current = base
-- first we loop over the glyphs in start .. stop
while start do
- local char = start.char
+ local char = getchar(start)
if not marks[char] then
baseindex = baseindex + componentindex
componentindex = getcomponentindex(start)
elseif not deletemarks then -- quite fishy
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ setprop(start,a_ligacomp,baseindex + (getprop(start,a_ligacomp) or componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getprop(start,a_ligacomp))
end
head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
elseif trace_marks then
logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
end
- start = start.next
+ start = getnext(start)
end
-- we can have one accent as part of a lookup and another following
-- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari were added)
- local start = current.next
- while start and start.id == glyph_code do
- local char = start.char
+ local start = getnext(current)
+ while start and getid(start) == glyph_code do
+ local char = getchar(start)
if marks[char] then
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ setprop(start,a_ligacomp,baseindex + (getprop(start,a_ligacomp) or componentindex))
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getprop(start,a_ligacomp))
end
else
break
end
- start = start.next
+ start = getnext(start)
end
end
return head, base
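
A hypothetical trace of the two loops above makes the indexing concrete (the component list and the zero starting values of baseindex and componentindex are assumptions for illustration; the initialisation itself sits just above this hunk): take components f m i, with m a mark that is kept, forming the ligature fi.

-- f : not a mark : baseindex = 0 + 0 = 0, componentindex = getcomponentindex(f) = 1
-- m : kept mark  : a_ligacomp = 0 + 1 = 1   -- the mark belongs to the first component
-- i : not a mark : baseindex = 0 + 1 = 1, componentindex = 1
-- a mark directly following the new ligature then gets a_ligacomp = 1 + 1 = 2 in the second loop
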
@@ -464,9 +483,9 @@ end
function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
end
- start.char = replacement
+ setfield(start,"char",replacement)
return head, start, true
end
@@ -493,7 +512,7 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
elseif value == 0 then
- return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
elseif value < 1 then
return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
@@ -502,68 +521,73 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
end
end
-local function multiple_glyphs(head,start,multiple) -- marks ?
+local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples = #multiple
if nofmultiples > 0 then
- start.char = multiple[1]
+ setfield(start,"char",multiple[1])
if nofmultiples > 1 then
- local sn = start.next
+ local sn = getnext(start)
for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and sn and marks[getchar(sn)] do
+--     sn = getnext(sn)
+-- end
local n = copy_node(start) -- ignore components
- n.char = multiple[k]
- n.next = sn
- n.prev = start
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
if sn then
- sn.prev = n
+ setfield(sn,"prev",n)
end
- start.next = n
+ setfield(start,"next",n)
start = n
end
end
return head, start, true
else
if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
+ logprocess("no multiple for %s",gref(getchar(start)))
end
return head, start, false
end
end
function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
end
- start.char = choice
+ setfield(start,"char",choice)
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
end
end
return head, start, true
end
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
end
- return multiple_glyphs(head,start,multiple)
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
+ local s, stop, discfound = getnext(start), nil, false
+ local startchar = getchar(start)
if marks[startchar] then
while s do
- local id = s.id
- if id == glyph_code and s.font == currentfont and s.subtype<256 then
- local lg = ligature[s.char]
+ local id = getid(s)
+ if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
+ local lg = ligature[getchar(s)]
if lg then
stop = s
ligature = lg
- s = s.next
+ s = getnext(s)
else
break
end
@@ -575,9 +599,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local lig = ligature.ligature
if lig then
if trace_ligatures then
- local stopchar = stop.char
+ local stopchar = getchar(stop)
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
end
@@ -589,18 +613,18 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
else
local skipmark = sequence.flags[1]
while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
+ local id = getid(s)
+ if id == glyph_code and getsubtype(s)<256 then
+ if getfont(s) == currentfont then
+ local char = getchar(s)
if skipmark and marks[char] then
- s = s.next
+ s = getnext(s)
else
local lg = ligature[char]
if lg then
stop = s
ligature = lg
- s = s.next
+ s = getnext(s)
else
break
end
@@ -610,25 +634,31 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
end
elseif id == disc_code then
discfound = true
- s = s.next
+ s = getnext(s)
else
break
end
end
- if stop then
- local lig = ligature.ligature
- if lig then
+ local lig = ligature.ligature
+ if lig then
+ if stop then
if trace_ligatures then
- local stopchar = stop.char
+ local stopchar = getchar(stop)
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
- return head, start, true
else
- -- ok, goto next lookup
+ -- weird but happens (in some arabic font)
+ setfield(start,"char",lig)
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
end
+ return head, start, true
+ else
+ -- weird but happens
end
end
return head, start, false
@@ -640,16 +670,16 @@ we need to explicitly test for basechar, baselig and basemark entries.
--ldx]]--
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -673,7 +703,7 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -701,16 +731,16 @@ end
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
-- check chainpos variant
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -722,7 +752,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index = start[a_ligacomp]
+ local index = getprop(start,a_ligacomp)
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -736,7 +766,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -769,22 +799,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getprop(start,a_ligacomp)
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = base[a_ligacomp]
+ local blc = getprop(base,a_ligacomp)
if blc and blc ~= slc then
- base = base.prev
+ base = getprev(base)
else
break
end
end
end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -796,7 +826,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -824,21 +854,21 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and start[a_cursbase]
+ local alreadydone = cursonce and getprop(start,a_cursbase)
if not alreadydone then
local done = false
- local startchar = start.char
+ local startchar = getchar(start)
if marks[startchar] then
if trace_cursive then
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = nxt.next
+ nxt = getnext(nxt)
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -873,14 +903,14 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head, start, false
end
end
function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = start.char
+ local startchar = getchar(start)
local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
@@ -891,35 +921,34 @@ end
function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
-- todo: kerns in components of ligatures
- local snext = start.next
+ local snext = getnext(start)
if not snext then
return head, start, false
else
local prev, done = start, false
local factor = tfmdata.parameters.factor
local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = snext.next
+ snext = getnext(snext)
else
- local krn = kerns[nextchar]
if not krn then
-- skip
elseif type(krn) == "table" then
if lookuptype == "pair" then -- probably not needed
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = start.char
+ local startchar = getchar(start)
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = start.char
+ local startchar = getchar(start)
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -931,7 +960,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- if a and a ~= 0 then
-- local k = setkern(snext,factor,rlmode,a)
-- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
-- end
-- end
-- if b and b ~= 0 then
@@ -942,7 +971,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
elseif krn ~= 0 then
local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
end
done = true
end
@@ -997,13 +1026,13 @@ end
-- itself. It is meant mostly for dealing with Urdu.
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
+ local char = getchar(start)
local replacement = replacements[char]
if replacement then
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
- start.char = replacement
+ setfield(start,"char",replacement)
return head, start, true
else
return head, start, false
@@ -1026,35 +1055,35 @@ single lookup case. The efficiency of the replacements can be improved by deleti
as little as needed but that would also make the code even more messy.
--ldx]]--
-local function delete_till_stop(start,stop,ignoremarks) -- keeps start
- local n = 1
- if start == stop then
- -- done
- elseif ignoremarks then
- repeat -- start x x m x x stop => start m
- local next = start.next
- if not marks[next.char] then
- local components = next.components
- if components then -- probably not needed
- flush_node_list(components)
- end
- delete_node(start,next)
- end
- n = n + 1
- until next == stop
- else -- start x x x stop => start
- repeat
- local next = start.next
- local components = next.components
- if components then -- probably not needed
- flush_node_list(components)
- end
- delete_node(start,next)
- n = n + 1
- until next == stop
- end
- return n
-end
+-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
+-- local n = 1
+-- if start == stop then
+-- -- done
+-- elseif ignoremarks then
+-- repeat -- start x x m x x stop => start m
+-- local next = getnext(start)
+-- if not marks[getchar(next)] then
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- end
+-- n = n + 1
+-- until next == stop
+-- else -- start x x x stop => start
+-- repeat
+-- local next = getnext(start)
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- n = n + 1
+-- until next == stop
+-- end
+-- return head, n
+-- end
--[[ldx--
Here we replace start by a single variant. First we delete the rest of the
@@ -1069,8 +1098,8 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
end
while current do
- if current.id == glyph_code then
- local currentchar = current.char
+ if getid(current) == glyph_code then
+ local currentchar = getchar(current)
local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
@@ -1087,14 +1116,14 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
- current.char = replacement
+ setfield(current,"char",replacement)
end
end
return head, start, true
elseif current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
return head, start, false
@@ -1108,8 +1137,8 @@ the match.
--ldx]]--
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- delete_till_stop(start,stop) -- we could pass ignoremarks as #3 ..
- local startchar = start.char
+ -- local head, n = delete_till_stop(head,start,stop)
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local replacements = lookuphash[lookupname]
@@ -1127,7 +1156,7 @@ function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,
if trace_multiples then
logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
end
- return multiple_glyphs(head,start,replacements)
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
end
end
return head, start, false
@@ -1152,8 +1181,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
local subtables = currentlookup.subtables
local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
+ if getid(current) == glyph_code then -- is this check needed?
+ local currentchar = getchar(current)
local lookupname = subtables[1]
local alternatives = lookuphash[lookupname]
if not alternatives then
@@ -1168,7 +1197,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
- start.char = choice
+ setfield(start,"char",choice)
else
if trace_alternatives then
logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
@@ -1182,7 +1211,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
elseif current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
return head, start, false
@@ -1197,7 +1226,7 @@ assume rather stupid ligatures (no complex disc nodes).
--ldx]]--
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local ligatures = lookuphash[lookupname]
@@ -1212,20 +1241,20 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
end
else
- local s = start.next
+ local s = getnext(start)
local discfound = false
local last = stop
local nofreplacements = 0
local skipmark = currentlookup.flags[1]
while s do
- local id = s.id
+ local id = getid(s)
if id == disc_code then
- s = s.next
+ s = getnext(s)
discfound = true
else
- local schar = s.char
+ local schar = getchar(s)
if skipmark and marks[schar] then -- marks
- s = s.next
+ s = getnext(s)
else
local lg = ligatures[schar]
if lg then
@@ -1233,7 +1262,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if s == stop then
break
else
- s = s.next
+ s = getnext(s)
end
else
break
@@ -1250,7 +1279,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
end
end
head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
@@ -1259,7 +1288,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
end
end
end
@@ -1270,7 +1299,7 @@ end
chainmores.gsub_ligature = chainprocs.gsub_ligature
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1279,14 +1308,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
markanchors = markanchors[markchar]
end
if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -1307,7 +1336,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -1334,7 +1363,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1343,14 +1372,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
markanchors = markanchors[markchar]
end
if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -1363,7 +1392,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
-- todo: like marks a ligatures hash
- local index = start[a_ligacomp]
+ local index = getprop(start,a_ligacomp)
local baseanchors = descriptions[basechar].anchors
if baseanchors then
local baseanchors = baseanchors['baselig']
@@ -1375,7 +1404,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -1403,64 +1432,59 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- -- local alreadydone = markonce and start[a_markmark]
- -- if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getprop(start,a_ligacomp)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getprop(base,a_ligacomp)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
end
end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
+ return head, start, true
end
end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
- -- elseif trace_marks and trace_details then
- -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
- -- end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
@@ -1468,9 +1492,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and start[a_cursbase]
+ local alreadydone = cursonce and getprop(start,a_cursbase)
if not alreadydone then
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local exitanchors = lookuphash[lookupname]
@@ -1484,12 +1508,12 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = nxt.next
+ nxt = getnext(nxt)
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -1524,7 +1548,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head, start, false
end
@@ -1534,7 +1558,7 @@ end
function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
-- untested .. needs checking for the new model
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
@@ -1550,13 +1574,14 @@ function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lo
return head, start, false
end
+chainmores.gpos_single = chainprocs.gpos_single -- okay?
+
-- when machines become faster i will make a shared function
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
--- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname))
- local snext = start.next
+ local snext = getnext(start)
if snext then
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
@@ -1566,12 +1591,12 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
local lookuptype = lookuptypes[lookupname]
local prev, done = start, false
local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = snext.next
+ snext = getnext(snext)
else
if not krn then
-- skip
@@ -1579,14 +1604,14 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if lookuptype == "pair" then
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = start.char
+ local startchar = getchar(start)
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = start.char
+ local startchar = getchar(start)
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -1598,7 +1623,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if a and a ~= 0 then
local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
end
if b and b ~= 0 then
@@ -1609,7 +1634,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
elseif krn ~= 0 then
local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
done = true
end
@@ -1623,6 +1648,8 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
return head, start, false
end
+chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
+
-- what pointer to return, spec says stop
-- to be discussed ... is bidi changer a space?
-- elseif char == zwnj and sequence[n][32] then -- brrr
@@ -1639,6 +1666,12 @@ local function show_skip(kind,chainname,char,ck,class)
end
end
+local quit_on_no_replacement = true
+
+directives.register("otf.chain.quitonnoreplacement",function(value) -- maybe per font
+ quit_on_no_replacement = value
+end)
+
local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
-- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
local flags = sequence.flags
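
The quit_on_no_replacement flag is registered as a regular directive, so it can be toggled at run time; a hedged usage sketch, assuming the usual trackers/directives interface (these calls are not part of the patch):

directives.disable("otf.chain.quitonnoreplacement") -- sets the flag to false
directives.enable("otf.chain.quitonnoreplacement")  -- back to the default (true)
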
@@ -1659,7 +1692,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- f..l = mid string
if s == 1 then
-- never happens
- match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
+ match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
else
-- maybe we need a better space check (maybe check for glue or category or combination)
-- we cannot optimize for n=2 because there can be disc nodes
@@ -1674,13 +1707,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- match = true
else
local n = f + 1
- last = last.next
+ last = getnext(last)
while n <= l do
if last then
- local id = last.id
+ local id = getid(last)
if id == glyph_code then
- if last.font == currentfont and last.subtype<256 then
- local char = last.char
+ if getfont(last) == currentfont and getsubtype(last)<256 then
+ local char = getchar(last)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1689,10 +1722,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if trace_skips then
show_skip(kind,chainname,char,ck,class)
end
- last = last.next
+ last = getnext(last)
elseif seq[n][char] then
if n < l then
- last = last.next
+ last = getnext(last)
end
n = n + 1
else
@@ -1708,7 +1741,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
elseif id == disc_code then
- last = last.next
+ last = getnext(last)
else
match = false
break
@@ -1722,15 +1755,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- before
if match and f > 1 then
- local prev = start.prev
+ local prev = getprev(start)
if prev then
local n = f-1
while n >= 1 do
if prev then
- local id = prev.id
+ local id = getid(prev)
if id == glyph_code then
- if prev.font == currentfont and prev.subtype<256 then -- normal char
- local char = prev.char
+ if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
+ local char = getchar(prev)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1761,7 +1794,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match = false
break
end
- prev = prev.prev
+ prev = getprev(prev)
elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
n = n -1
else
@@ -1782,16 +1815,16 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- after
if match and s > l then
- local current = last and last.next
+ local current = last and getnext(last)
if current then
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
while n <= s do
if current then
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- if current.font == currentfont and current.subtype<256 then -- normal char
- local char = current.char
+ if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
+ local char = getchar(current)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1822,7 +1855,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match = false
break
end
- current = current.next
+ current = getnext(current)
elseif seq[n][32] then
n = n + 1
else
@@ -1846,7 +1879,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- ck == currentcontext
if trace_contexts then
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
+ local char = getchar(start)
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -1865,7 +1898,11 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if chainlookup then
local cp = chainprocs[chainlookup.type]
if cp then
- head, start, done = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ local ok
+ head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done = true
+ end
else
logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
end
@@ -1877,12 +1914,12 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
repeat
if skipped then
while true do
- local char = start.char
+ local char = getchar(start)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
+ start = getnext(start)
else
break
end
@@ -1892,25 +1929,31 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname] -- can be false (n matches, 254
+--
+-- attr = attr or false
+--
+-- local a = getattr(start,0)
+-- if (a == attr and (not attribute or getprop(start,a_state) == attribute)) or (not attribute or getprop(start,a_state) == attribute) then
+-- -- the action
+-- end
+
local function featuresprocessor(head,font,attr)
local lookuphash = lookuphashes[font] -- we can also check sequences here
@@ -2089,28 +2141,30 @@ local function featuresprocessor(head,font,attr)
return head, false
end
+ head = tonut(head)
+
if trace_steps then
checkstep(head)
end
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions
+ characters = tfmdata.characters
+ resources = tfmdata.resources
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
+ marks = resources.marks
+ anchorlookups = resources.lookup_to_anchor
+ lookuptable = resources.lookups
+ lookuptypes = resources.lookuptypes
- currentfont = font
- rlmode = 0
+ currentfont = font
+ rlmode = 0
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
+ local sequences = resources.sequences
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
- local dirstack = { } -- could move outside function
+ local dirstack = { } -- could move outside function
-- We could work on sub start-stop ranges instead but I wonder if there is that
-- much speed gain (experiments showed that it made not much sense) and we need
@@ -2120,246 +2174,436 @@ local function featuresprocessor(head,font,attr)
-- Keeping track of the headnode is needed for devanagari (I generalized it a bit
-- so that multiple cases are also covered.)
--- for s=1,#sequences do
--- local dataset = datasets[s]
--- if dataset then
--- featurevalue = dataset[1] -- todo: pass to function instead of using a global
--- if featurevalue then -- never false
-
-for s=1,#datasets do
- local dataset = datasets[s]
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
-
- local sequence = dataset[5] -- sequences[s] -- also dataset[5]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
- if chain < 0 then
- -- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ -- todo: retain prev
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ local typ = sequence.type
+ local subtables = sequence.subtables
+ if chain < 0 then
+ -- this is a limited case, no special treatments like 'init' etc
+ local handler = handlers[typ]
+ -- we need to get rid of this slide! probably no longer needed in latest luatex
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = getprev(start) end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ end
+ else
+ local handler = handlers[typ]
+ local ns = #subtables
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
+ if ns == 1 then -- happens often
+ local lookupname = subtables[1]
+ local lookupcache = lookuphash[lookupname]
+ if not lookupcache then -- also check for empty cache
+ report_missing_cache(typ,lookupname)
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+                        -- sequence can go
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = getprev(disc)
+ local next = getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+ -- sequence can be dropped
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ return next
+ end
+
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
- a = a == attr
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = true
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- sequence can be dropped
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success = true
end
end
- if start then start = start.prev end
+ if start then start = getnext(start) end
else
- start = start.prev
+ start = getnext(start)
end
else
- start = start.prev
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if getsubtype(start) == discretionary_code then
+ local pre = getfield(start,"pre")
+ if pre then
+ local new = subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post = getfield(start,"post")
+ if post then
+ local new = subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace = getfield(start,"replace")
+ if replace then
+ local new = subrun(replace)
+ if new then setfield(start,"replace",new) end
+ end
+ elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = getnext(start)
+ elseif id == whatsit_code then -- will be function
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ -- one might wonder if the par dir should be looked at, so we might as well drop the next line
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
else
- start = start.prev
+ start = getnext(start)
end
end
- else
- local handler = handlers[typ]
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- sequence kan weg
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- end
+ end
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
end
- if start then start = start.next end
- else
- start = start.next
end
- elseif id == math_code then
- start = end_of_math(start).next
else
- start = start.next
+ report_missing_cache(typ,lookupname)
end
- elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = getprev(disc)
+ local next = getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
end
end
- start = start.next
- elseif id == math_code then
- start = end_of_math(start).next
else
- start = start.next
+ report_missing_cache(typ,lookupname)
end
end
end
- else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ return next
+ end
+
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
end
end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == whatsit_code then
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ report_missing_cache(typ,lookupname)
end
end
- start = start.next
- elseif id == math_code then
- start = end_of_math(start).next
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if getsubtype(start) == discretionary_code then
+ local pre = getfield(start,"pre")
+ if pre then
+ local new = subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post = getfield(start,"post")
+ if post then
+ local new = subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace = getfield(start,"replace")
+ if replace then
+ local new = subrun(replace)
+ if new then setfield(start,"replace",new) end
end
+ elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
end
+ start = getnext(start)
+ elseif id == whatsit_code then
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ else
+ start = getnext(start)
end
end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
+ end
+ end
+ if success then
+ done = true
+ end
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+ end
--- end
--- else
--- -- report_process("warning, no dataset %a",s)
--- end
+ head = tonode(head)
- end
return head, done
end
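
A short aside on the direction handling in the loop above: the whatsit branch keeps a stack of text directions ("+TRT"/"+TLT" push, "-TRT"/"-TLT" pop) and derives rlmode from whatever sits on top, falling back to the paragraph mode when the stack is empty. The following standalone sketch (plain Lua, not part of the patch; simulate_dirs and its sample events are made up for illustration) shows just that bookkeeping:

    local function simulate_dirs(events, rlparmode)
        rlparmode = rlparmode or 0
        local dirstack, topstack, rlmode = { }, 0, rlparmode
        for i=1,#events do
            local dir = events[i]
            if dir == "+TRT" or dir == "+TLT" then
                topstack = topstack + 1
                dirstack[topstack] = dir
            elseif dir == "-TRT" or dir == "-TLT" then
                topstack = topstack - 1
            end
            local newdir = dirstack[topstack]
            if newdir == "+TRT" then
                rlmode = -1
            elseif newdir == "+TLT" then
                rlmode = 1
            else
                rlmode = rlparmode -- empty stack: back to the paragraph mode
            end
            print(dir, rlmode)
        end
    end

    simulate_dirs { "+TRT", "+TLT", "-TLT", "-TRT" }
    -- prints -1, 1, -1 and finally 0 (back to the paragraph mode)
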
diff --git a/Master/texmf-dist/tex/context/base/font-otp.lua b/Master/texmf-dist/tex/context/base/font-otp.lua
index 217bb7535fa..c80ee86ae61 100644
--- a/Master/texmf-dist/tex/context/base/font-otp.lua
+++ b/Master/texmf-dist/tex/context/base/font-otp.lua
@@ -407,6 +407,14 @@ local function packdata(data)
features[script] = pack_normal(feature)
end
end
+ local order = sequence.order
+ if order then
+ sequence.order = pack_indexed(order)
+ end
+ local markclass = sequence.markclass
+ if markclass then
+ sequence.markclass = pack_boolean(markclass)
+ end
end
end
local lookups = resources.lookups
@@ -825,6 +833,20 @@ local function unpackdata(data)
end
end
end
+ local order = feature.order
+ if order then
+ local tv = tables[order]
+ if tv then
+ feature.order = tv
+ end
+ end
+ local markclass = feature.markclass
+ if markclass then
+ local tv = tables[markclass]
+ if tv then
+ feature.markclass = tv
+ end
+ end
end
end
local lookups = resources.lookups
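
The two hunks above extend the font-otp packer so that sequence.order and sequence.markclass are shared through the same table pool as the other subtables. As a rough illustration of that pooling idea (not the actual pack_indexed/pack_boolean code; all names below are invented), identical tables are stored once and referenced by index:

    local pool  = { } -- shared storage, one slot per unique table
    local index = { } -- content signature -> slot

    local function pack(t)
        local signature = table.concat(t," ") -- naive identity, good enough for a sketch
        local slot = index[signature]
        if not slot then
            slot = #pool + 1
            pool[slot] = t
            index[signature] = slot
        end
        return slot
    end

    local function unpack_slot(slot)
        return pool[slot]
    end

    local a = pack { "dflt", "latn" }
    local b = pack { "dflt", "latn" } -- identical content ends up in the same slot
    print(a == b, unpack_slot(a)[2])  -- true    latn
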
diff --git a/Master/texmf-dist/tex/context/base/font-ott.lua b/Master/texmf-dist/tex/context/base/font-ott.lua
index e3aacd0d1bd..1e23090565b 100644
--- a/Master/texmf-dist/tex/context/base/font-ott.lua
+++ b/Master/texmf-dist/tex/context/base/font-ott.lua
@@ -42,6 +42,7 @@ local scripts = allocate {
['cprt'] = 'cypriot syllabary',
['cyrl'] = 'cyrillic',
['deva'] = 'devanagari',
+ ['dev2'] = 'devanagari variant 2',
['dsrt'] = 'deseret',
['ethi'] = 'ethiopic',
['geor'] = 'georgian',
@@ -67,6 +68,7 @@ local scripts = allocate {
['linb'] = 'linear b',
['math'] = 'mathematical alphanumeric symbols',
['mlym'] = 'malayalam',
+ ['mlm2'] = 'malayalam variant 2',
['mong'] = 'mongolian',
['musc'] = 'musical symbols',
['mymr'] = 'myanmar',
@@ -631,6 +633,7 @@ local features = allocate {
['js..'] = 'justification ..',
["dv.."] = "devanagari ..",
+ ["ml.."] = "malayalam ..",
}
local baselines = allocate {
diff --git a/Master/texmf-dist/tex/context/base/font-otx.lua b/Master/texmf-dist/tex/context/base/font-otx.lua
index 5c41ad66faf..dc0469e398e 100644
--- a/Master/texmf-dist/tex/context/base/font-otx.lua
+++ b/Master/texmf-dist/tex/context/base/font-otx.lua
@@ -30,14 +30,28 @@ analyzers.methods = methods
local a_state = attributes.private('state')
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local traverse_node_list = nuts.traverse
+local end_of_math = nuts.end_of_math
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
local math_code = nodecodes.math
-local traverse_id = node.traverse_id
-local traverse_node_list = node.traverse
-local end_of_math = node.end_of_math
-
local fontdata = fonts.hashes.identifiers
local categories = characters and characters.categories or { } -- sorry, only in context
local chardata = characters and characters.data
@@ -79,6 +93,12 @@ local features = {
fina = s_fina,
isol = s_isol,
-- mark = s_mark,
+ -- rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
}
analyzers.states = states
@@ -88,60 +108,61 @@ analyzers.useunicodemarks = false
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-function analyzers.setstate(head,font)
+function analyzers.setstate(head,font) -- we can skip math
local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
local descriptions = tfmdata.descriptions
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
+ current = tonut(current)
while current do
- local id = current.id
- if id == glyph_code and current.font == font then
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font then
done = true
- local char = current.char
+ local char = getchar(current)
local d = descriptions[char]
if d then
if d.class == "mark" then
done = true
- current[a_state] = s_mark
+ setprop(current,a_state,s_mark)
elseif useunicodemarks and categories[char] == "mn" then
done = true
- current[a_state] = s_mark
+ setprop(current,a_state,s_mark)
elseif n == 0 then
first, last, n = current, current, 1
- current[a_state] = s_init
+ setprop(current,a_state,s_init)
else
last, n = current, n+1
- current[a_state] = s_medi
+ setprop(current,a_state,s_medi)
end
else -- finish
if first and first == last then
- last[a_state] = s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
end
first, last, n = nil, nil, 0
end
elseif id == disc_code then
-- always in the middle
- current[a_state] = s_midi
+ setprop(current,a_state,s_medi)
last = current
else -- finish
if first and first == last then
- last[a_state] = s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
end
first, last, n = nil, nil, 0
if id == math_code then
current = end_of_math(current)
end
end
- current = current.next
+ current = getnext(current)
end
if first and first == last then
- last[a_state] = s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
end
return head, done
end
@@ -184,7 +205,7 @@ end
registerotffeature {
name = "analyze",
- description = "analysis of (for instance) character classes",
+ description = "analysis of character classes",
default = true,
initializers = {
node = analyzeinitializer,
@@ -202,7 +223,7 @@ methods.latn = analyzers.setstate
local arab_warned = { }
local function warning(current,what)
- local char = current.char
+ local char = getchar(current)
if not arab_warned[char] then
log.report("analyze","arab: character %C has no %a class",char,what)
arab_warned[char] = true
@@ -254,94 +275,95 @@ function methods.arab(head,font,attr)
local first, last = nil, nil
local c_first, c_last = nil, nil
local current, done = head, false
+ current = tonut(current)
while current do
- local id = current.id
- if id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then
done = true
- local char = current.char
+ local char = getchar(current)
local classifier = classifiers[char]
if not classifier then
if last then
if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
else
warning(last,"fina")
- last[a_state] = s_error
+ setprop(last,a_state,s_error)
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state] = s_error
+ setprop(first,a_state,s_error)
end
first = nil
end
elseif classifier == s_mark then
- current[a_state] = s_mark
+ setprop(current,a_state,s_mark)
elseif classifier == s_isol then
if last then
if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
else
warning(last,"fina")
- last[a_state] = s_error
+ setprop(last,a_state,s_error)
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state] = s_error
+ setprop(first,a_state,s_error)
end
first = nil
end
- current[a_state] = s_isol
+ setprop(current,a_state,s_isol)
elseif classifier == s_medi then
if first then
last = current
c_last = classifier
- current[a_state] = s_medi
+ setprop(current,a_state,s_medi)
else
- current[a_state] = s_init
+ setprop(current,a_state,s_init)
first = current
c_first = classifier
end
elseif classifier == s_fina then
if last then
- if last[a_state] ~= s_init then
- last[a_state] = s_medi
+ if getprop(last,a_state) ~= s_init then
+ setprop(last,a_state,s_medi)
end
- current[a_state] = s_fina
+ setprop(current,a_state,s_fina)
first, last = nil, nil
elseif first then
- -- if first[a_state] ~= s_init then
+ -- if getprop(first,a_state) ~= s_init then
-- -- needs checking
- -- first[a_state] = s_medi
+ -- setprop(first,a_state,s_medi)
-- end
- current[a_state] = s_fina
+ setprop(current,a_state,s_fina)
first = nil
else
- current[a_state] = s_isol
+ setprop(current,a_state,s_isol)
end
else -- classifier == s_rest
- current[a_state] = s_rest
+ setprop(current,a_state,s_rest)
if last then
if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
else
warning(last,"fina")
- last[a_state] = s_error
+ setprop(last,a_state,s_error)
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state] = s_error
+ setprop(first,a_state,s_error)
end
first = nil
end
@@ -349,18 +371,18 @@ function methods.arab(head,font,attr)
else
if last then
if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
else
warning(last,"fina")
- last[a_state] = s_error
+ setprop(last,a_state,s_error)
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state] = s_error
+ setprop(first,a_state,s_error)
end
first = nil
end
@@ -368,21 +390,21 @@ function methods.arab(head,font,attr)
current = end_of_math(current)
end
end
- current = current.next
+ current = getnext(current)
end
if last then
if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
else
warning(last,"fina")
- last[a_state] = s_error
+ setprop(last,a_state,s_error)
end
elseif first then
if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state] = s_error
+ setprop(first,a_state,s_error)
end
end
return head, done
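
The analyzer changes above are mostly a mechanical move from node fields to the nuts accessors; the underlying state assignment is unchanged: within a run of joining glyphs the first one is tagged init and the following ones medi, and when the run ends the last tag is rewritten to fina (or isol for a one-glyph run). A condensed standalone sketch of that rule (assignstates is made up, not the module's API):

    local function assignstates(runs) -- runs is a list of run lengths, e.g. { 1, 3 }
        local result = { }
        for r=1,#runs do
            local n = runs[r]
            if n == 1 then
                result[#result+1] = "isol"
            elseif n > 1 then
                result[#result+1] = "init"
                for i=2,n-1 do
                    result[#result+1] = "medi"
                end
                result[#result+1] = "fina"
            end
        end
        return table.concat(result," ")
    end

    print(assignstates { 1, 3 }) -- isol init medi fina
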
diff --git a/Master/texmf-dist/tex/context/base/font-pat.lua b/Master/texmf-dist/tex/context/base/font-pat.lua
index 3ad37641ce3..04985379677 100644
--- a/Master/texmf-dist/tex/context/base/font-pat.lua
+++ b/Master/texmf-dist/tex/context/base/font-pat.lua
@@ -20,25 +20,25 @@ local patches = otf.enhancers.patches
local register = patches.register
local report = patches.report
-local function patch(data,filename)
- if data.design_size == 0 then
- local ds = match(file.basename(lower(filename)),"(%d+)")
- if ds then
- report("font %a has design size %a",filename,ds)
- data.design_size = tonumber(ds) * 10
- end
- end
-end
-
-register("after","migrate metadata","^lmroman", patch)
-register("after","migrate metadata","^lmsans", patch)
-register("after","migrate metadata","^lmtypewriter",patch)
+-- local function patch(data,filename)
+-- if not metadata.design_size or metadata.design_size == 0 then
+-- local ds = match(file.basename(lower(filename)),"(%d+)")
+-- if ds then
+-- report("font %a has design size %a",filename,ds)
+-- metadata.design_size = tonumber(ds) * 10
+-- end
+-- end
+-- end
+--
+-- register("after","migrate metadata","^lmroman", patch)
+-- register("after","migrate metadata","^lmsans", patch)
+-- register("after","migrate metadata","^lmtypewriter",patch)
-- For some reason (either it's a bug in the font, or it's a problem in the
-- library) the palatino arabic fonts don't have the mkmk features properly
-- set up.
-local function patch(data,filename)
+register("after","rehash features","^palatino.*arabic", function (data,filename)
local gpos = data.gpos
if gpos then
for k=1,#gpos do
@@ -61,9 +61,7 @@ local function patch(data,filename)
end
end
end
-end
-
-register("after","rehash features","palatino.*arabic",patch)
+end)
-- -- this code is now in lm-math.lfg
--
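
The font-pat change above only inlines the palatino patch into its register call; the registration pattern itself stays the same: a stage ("after"), a step name, a Lua string pattern matched against the font name, and a patch function. The toy mimic below (all names invented, not ConTeXt's patches mechanism) shows how such a table of registered patches could be applied:

    local patches = { }

    local function register(stage, step, pattern, patch)
        patches[#patches+1] = { stage = stage, step = step, pattern = pattern, patch = patch }
    end

    local function applypatches(stage, step, fontname, data)
        for i=1,#patches do
            local p = patches[i]
            if p.stage == stage and p.step == step and fontname:find(p.pattern) then
                p.patch(data, fontname)
            end
        end
    end

    register("after", "rehash features", "^palatino.*arabic", function(data, filename)
        print("patching", filename) -- a real patch would adjust data.gpos here
    end)

    applypatches("after", "rehash features", "palatinoarabic", { })
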
diff --git a/Master/texmf-dist/tex/context/base/font-pre.mkiv b/Master/texmf-dist/tex/context/base/font-pre.mkiv
index b03abed7d4c..fc6eb289e64 100644
--- a/Master/texmf-dist/tex/context/base/font-pre.mkiv
+++ b/Master/texmf-dist/tex/context/base/font-pre.mkiv
@@ -17,11 +17,15 @@
%D A basic set of features is defined here.
+% beware, base mode + dynamics can give weird effects
+
+% rlig ccmp
+
\definefontfeature
[always]
- [mode=auto,
- script=auto,
- kern=yes,
+ [mode=node, % we had 'auto', but let's try 'node' for a while and see what the impact is
+ script=auto, % on speed; 'base' just doesn't play well with dynamics; some day we can even
+ kern=yes, % consider skipping the base passes when no base mode is used
mark=yes,
mkmk=yes,
curs=yes]
@@ -48,6 +52,20 @@
tlig=yes,
trep=yes] % texligatures=yes,texquotes=yes
+\definefontfeature
+ [inlinenumbers]
+ [lnum=yes,
+ tnum=no]
+
+\definefontfeature
+ [tabularnumbers]
+ [tnum=yes,
+ lnum=no]
+
+\definefontfeature
+ [oldstylenumbers]
+ [onum=yes]
+
% \definefontfeature
% [newstyle]
% [onum=no]
@@ -59,6 +77,15 @@
tlig=yes,
trep=yes]
+\definefontfeature
+ [letterspacing]
+ [liga=no,
+ rlig=no,
+ clig=no,
+ dlig=no,
+ ccmp=yes,
+ keepligatures=auto]
+
\definefontfeature % can be used for type1 fonts
[complete]
[always]
@@ -72,28 +99,38 @@
[mode=none,
features=no]
-\definefontfeature % might move
- [arabic]
- [mode=node,language=dflt,script=arab,ccmp=yes,
+\definefontfeature
+ [semetic-complete]
+ [mode=node,analyze=yes,language=dflt,ccmp=yes,
init=yes,medi=yes,fina=yes,isol=yes,
- liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes,
- mark=yes,mkmk=yes,kern=yes,curs=yes]
+ mark=yes,mkmk=yes,kern=yes,curs=yes,
+ liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes]
+
+\definefontfeature
+ [semetic-simple]
+ [mode=node,analyze=yes,language=dflt,ccmp=yes,
+ init=yes,medi=yes,fina=yes,isol=yes,
+ mark=yes,mkmk=yes,kern=yes,curs=yes,
+ rlig=yes,calt=yes]
\definefontfeature
- [hebrew]
[arabic]
+ [semetic-complete]
+ [script=arab]
+
+\definefontfeature
+ [hebrew]
+ [semetic-complete]
[script=hebr]
-\definefontfeature % might move
+\definefontfeature
[simplearabic]
- [mode=node,language=dflt,script=arab,
- init=yes,medi=yes,fina=yes,
- rlig=yes,calt=yes,
- mark=yes,mkmk=yes,curs=yes]
+ [semetic-simple]
+ [script=arab]
\definefontfeature
[simplehebrew]
- [simplearabic]
+ [semetic-simple]
[script=hebr]
% \definefont [DevaOne] [file:chandas.ttf*devanagari-one at 12pt]
@@ -132,6 +169,30 @@
calt=yes,
kern=yes]
+\definefontfeature
+ [malayalam-one]
+ [mode=node,
+ language=dflt,
+ script=mlym,
+ akhn=yes,
+ blwf=yes,
+ half=yes,
+ pres=yes,
+ blws=yes,
+ psts=yes,
+ haln=no]
+
+\definefontfeature
+ [malayalam-two]
+ [malayalam-one]
+ [script=mlm2]
+
+\definefontfeature
+ [jamoforms]
+ [ljmo=yes,
+ tjmo=yes,
+ vjmo=yes]
+
% symbols:
\definefontfeature
@@ -151,6 +212,7 @@
trep=yes,
mathalternates=yes,
mathitalics=yes,
+ % mathgaps=yes,
% nomathitalics=yes, % don't pass to tex, might become default
language=dflt,
script=math]
@@ -195,6 +257,21 @@
[missing]
[missing=yes]
+%D Nice to have too:
+
+\definefontfeature
+ [quality]
+ [expansion=quality,
+ protrusion=quality]
+
+\definefontfeature
+ [slanted]
+ [slant=.2]
+
+\definefontfeature
+ [boldened]
+ [extend=1.2]
+
%D We define some colors that are used in tracing (for instance \OPENTYPE\
%D features). We cannot yet inherit because no colors are predefined.
@@ -497,6 +574,7 @@
\definealternativestyle [\v!Words] [{\setcharactercasing[\v!Words ]}] [{\setcharactercasing[\v!Words ]}]
\definealternativestyle [\v!capital] [{\setcharactercasing[\v!capital]}] [{\setcharactercasing[\v!capital]}]
\definealternativestyle [\v!Capital] [{\setcharactercasing[\v!Capital]}] [{\setcharactercasing[\v!Capital]}]
+\definealternativestyle [\v!mixed] [{\setcharactercasing[\v!mixed ]}] [{\setcharactercasing[\v!mixed ]}]
\definealternativestyle [\v!cap] [{\setcharactercasing[\v!cap ]}] [{\setcharactercasing[\v!cap ]}]
\definealternativestyle [\v!Cap] [{\setcharactercasing[\v!Cap ]}] [{\setcharactercasing[\v!Cap ]}]
@@ -505,12 +583,14 @@
\definefontfeature[f:smallcaps][smcp=yes]
\definefontfeature[f:oldstyle] [onum=yes]
+\definefontfeature[f:tabular] [tnum=yes]
\definealternativestyle [\v!smallcaps] [\setsmallcaps] [\setsmallcaps]
\definealternativestyle [\v!oldstyle] [\setoldstyle ] [\setoldstyle ]
\unexpanded\def\setsmallcaps{\doaddfeature{f:smallcaps}}
\unexpanded\def\setoldstyle {\doaddfeature{f:oldstyle}}
+\unexpanded\def\settabular {\doaddfeature{f:tabular}}
%D \macros
%D {tinyfont}
@@ -523,7 +603,7 @@
%D
%D For tracing purposes we define:
-\definefont[tinyfont][Mono at 1ex]
+\definefont[tinyfont][dejavusansmono at 1ex]
%D \macros
%D {infofont}
@@ -535,7 +615,7 @@
\let\infofont\relax % satisfy dep checker
-\definefont [infofont] [Mono at 6pt] % todo \the\everybodyfont
+\definefont[infofont][dejavusansmono at 6pt] % todo \the\everybodyfont
\protect \endinput
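
In the feature definitions above, [arabic] and [hebrew] now inherit from [semetic-complete] and only override the script, and the simple variants do the same with [semetic-simple]. The mechanics boil down to table inheritance with child overrides; a small Lua illustration (not ConTeXt's \definefontfeature implementation; names and keys are chosen to mirror the definitions above):

    local featuresets = { }

    local function definefontfeature(name, parent, settings)
        local t = { }
        for k, v in pairs(parent and featuresets[parent] or { }) do
            t[k] = v -- inherit the parent's settings
        end
        for k, v in pairs(settings or { }) do
            t[k] = v -- child overrides win
        end
        featuresets[name] = t
    end

    definefontfeature("semetic-complete", nil, { mode = "node", analyze = "yes", language = "dflt", ccmp = "yes", init = "yes", medi = "yes", fina = "yes", isol = "yes" })
    definefontfeature("arabic", "semetic-complete", { script = "arab" })
    definefontfeature("hebrew", "semetic-complete", { script = "hebr" })

    print(featuresets.arabic.mode, featuresets.arabic.script) -- node    arab
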
diff --git a/Master/texmf-dist/tex/context/base/font-sel.lua b/Master/texmf-dist/tex/context/base/font-sel.lua
new file mode 100644
index 00000000000..86300c2dbcb
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-sel.lua
@@ -0,0 +1,712 @@
+if not modules then modules = { } end modules ['font-sel'] = {
+ version = 1.000,
+ comment = "companion to font-sel.mkvi",
+ author = "Wolfgang Schuster",
+ copyright = "Wolfgang Schuster",
+ license = "GNU General Public License"
+}
+
+local context = context
+local cleanname = fonts.names.cleanname
+local gsub, splitup, find = string.gsub, string.splitup, string.find
+local splitbase, removesuffix = file.splitbase, file.removesuffix
+local splitat, lpegmatch = lpeg.splitat, lpeg.match
+
+local formatters = string.formatters
+local settings_to_array = utilities.parsers.settings_to_array
+
+local v_yes = interfaces.variables.yes
+local v_simplefonts = interfaces.variables.simplefonts
+local v_selectfont = interfaces.variables.selectfont
+local v_default = interfaces.variables.default
+
+local selectfont = fonts.select or { }
+fonts.select = selectfont
+
+local data = selectfont.data or { }
+selectfont.data = data
+
+local fallbacks = selectfont.fallbacks or { }
+selectfont.fallbacks = fallbacks
+
+local methods = selectfont.methods or { }
+selectfont.methods = methods
+
+local getlookups = fonts.names.getlookups
+local registerdesignsizes = fonts.goodies.designsizes.register
+
+local alternatives = {
+ ["tf"] = "regular",
+ ["it"] = "italic",
+ ["sl"] = "slanted",
+ ["bf"] = "bold",
+ ["bi"] = "bolditalic",
+ ["bs"] = "boldslanted",
+ ["sc"] = "smallcaps",
+}
+
+local styles = {
+ ["rm"] = "serif",
+ ["ss"] = "sans",
+ ["tt"] = "mono",
+ ["hw"] = "handwriting",
+ ["cg"] = "calligraphy",
+ ["mm"] = "math",
+}
+
+local sizes = {
+ ["default"] = {
+ { 40, "4pt" },
+ { 50, "5pt" },
+ { 60, "6pt" },
+ { 70, "7pt" },
+ { 80, "8pt" },
+ { 90, "9pt" },
+ { 100, "10pt" },
+ { 110, "11pt" },
+ { 120, "12pt" },
+ { 144, "14.4pt" },
+ { 173, "17.3pt" },
+ },
+ ["dtp"] = {
+ { 50, "5pt" },
+ { 60, "6pt" },
+ { 70, "7pt" },
+ { 80, "8pt" },
+ { 90, "9pt" },
+ { 100, "10pt" },
+ { 110, "11pt" },
+ { 120, "12pt" },
+ { 130, "13pt" },
+ { 140, "14pt" },
+ { 160, "16pt" },
+ { 180, "18pt" },
+ { 220, "22pt" },
+ { 280, "28pt" },
+ }
+}
+
+local synonyms = {
+ ["rm"] = {
+ ["tf"] = "Serif",
+ ["it"] = "SerifItalic",
+ ["sl"] = "SerifSlanted",
+ ["bf"] = "SerifBold",
+ ["bi"] = "SerifBoldItalic",
+ ["bs"] = "SerifBoldSlanted",
+ ["sc"] = "SerifCaps",
+ },
+ ["ss"] = {
+ ["tf"] = "Sans",
+ ["it"] = "SansItalic",
+ ["sl"] = "SansSlanted",
+ ["bf"] = "SansBold",
+ ["bi"] = "SansBoldItalic",
+ ["bs"] = "SansBoldSlanted",
+ ["sc"] = "SansCaps",
+ },
+ ["tt"] = {
+ ["tf"] = "Mono",
+ ["it"] = "MonoItalic",
+ ["sl"] = "MonoSlanted",
+ ["bf"] = "MonoBold",
+ ["bi"] = "MonoBoldItalic",
+ ["bs"] = "MonoBoldSlanted",
+ ["sc"] = "MonoCaps",
+ },
+ ["hw"] = {
+ ["tf"] = "Handwriting",
+ },
+ ["cg"] = {
+ ["tf"] = "Calligraphy",
+ },
+ ["mm"] = {
+ ["tf"] = "MathRoman",
+ ["bf"] = "MathBold",
+ }
+}
+
+local replacement = {
+ ["style"] = {
+ ["it"] = "tf",
+ ["sl"] = "it",
+ ["bf"] = "tf",
+ ["bi"] = "bf",
+ ["bs"] = "bi",
+ ["sc"] = "tf",
+ },
+ ["weight"] = {
+ ["it"] = "tf",
+ ["sl"] = "tf",
+ ["bf"] = "tf",
+ ["bi"] = "bf",
+ ["bs"] = "bf",
+ ["sc"] = "tf",
+ },
+}
+
+local names = {
+ ["selectfont"] = { -- weight, style, width, variant, italic
+ ["regular"] = { weight = "normal", style = "normal", width = "normal", variant = "normal", italic = false },
+ ["italic"] = { weight = "normal", style = "italic", width = "normal", variant = "normal", italic = true },
+ ["slanted"] = { weight = "normal", style = "slanted", width = "normal", variant = "normal", italic = true },
+ ["medium"] = { weight = "medium", style = "normal", width = "normal", variant = "normal", italic = false },
+ ["mediumitalic"] = { weight = "medium", style = "italic", width = "normal", variant = "normal", italic = true },
+ ["mediumcaps"] = { weight = "medium", style = "normal", width = "normal", variant = "smallcaps", italic = true },
+ ["bold"] = { weight = "bold", style = "normal", width = "normal", variant = "normal", italic = false },
+ ["bolditalic"] = { weight = "bold", style = "italic", width = "normal", variant = "normal", italic = true },
+ ["boldslanted"] = { weight = "bold", style = "slanted", width = "normal", variant = "normal", italic = true },
+ ["smallcaps"] = { weight = "normal", style = "normal", width = "normal", variant = "smallcaps", italic = false },
+ },
+ ["simplefonts"] = {
+ ["light"] = { "lightregular", "light" },
+ ["lightitalic"] = { "lightitalic", "lightit", "lightoblique" },
+ ["lightcaps"] = { "smallcapslight" },
+ ["regular"] = { "roman", "regular", "book", "" },
+ ["italic"] = { "italic", "it", "oblique", "kursiv", "bookitalic", "bookit" },
+ ["medium"] = { "mediumregular", "medregular", "medium" },
+ ["mediumitalic"] = { "mediumitalic", "meditalic" },
+ ["mediumcaps"] = { "mediumcaps" },
+ ["bold"] = { "bold", "bd", "kraeftig", "mediumregular", "semibold", "demi" },
+ ["bolditalic"] = { "bolditalic", "boldit", "bdit", "boldoblique", "mediumitalic", "semibolditalic", "demiitalic" },
+ ["smallcaps"] = { "smallcaps", "capitals", "sc" },
+ ["heavy"] = { "heavyregular", "heavy" },
+ ["heavyitalic"] = { "heavyitalic" },
+ },
+ ["default"] = { -- weight, width, italic
+ ["thin"] = { weight = { 100, 200, 300, 400, 500 }, width = 5, italic = false },
+ ["thinitalic"] = { weight = { 100, 200, 300, 400, 500 }, width = 5, italic = true },
+ ["extralight"] = { weight = { 200, 100, 300, 400, 500 }, width = 5, italic = false },
+ ["extralightitalic"] = { weight = { 200, 100, 300, 400, 500 }, width = 5, italic = true },
+ ["light"] = { weight = { 300, 200, 100, 400, 500 }, width = 5, italic = false },
+ ["lightitalic"] = { weight = { 300, 200, 100, 400, 500 }, width = 5, italic = true },
+ ["regular"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = false },
+ ["italic"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = true },
+ ["medium"] = { weight = { 500, 400, 300, 200, 100 }, width = 5, italic = false },
+ ["mediumitalic"] = { weight = { 500, 400, 300, 200, 100 }, width = 5, italic = true },
+ ["demibold"] = { weight = { 600, 700, 800, 900 }, width = 5, italic = false },
+ ["demibolditalic"] = { weight = { 600, 700, 800, 900 }, width = 5, italic = true },
+ ["bold"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = false },
+ ["bolditalic"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = true },
+ ["extrabold"] = { weight = { 800, 900, 700, 600 }, width = 5, italic = false },
+ ["extrabolditalic"] = { weight = { 800, 900, 700, 600 }, width = 5, italic = true },
+ ["heavy"] = { weight = { 900, 800, 700, 600 }, width = 5, italic = false },
+ ["heavyitalic"] = { weight = { 900, 800, 700, 600 }, width = 5, italic = true },
+ }
+}
+
+-- simplefonts synonyms
+
+names.simplefonts.slanted = names.simplefonts.italic
+names.simplefonts.boldslanted = names.simplefonts.bolditalic
+
+-- default synonyms
+
+names.default.ultralight = names.default.extralight
+names.default.semibold = names.default.demibold
+names.default.ultrabold = names.default.extrabold
+names.default.black = names.default.heavy
+
+names.default.ultralightitalic = names.default.extralightitalic
+names.default.semibolditalic = names.default.demibolditalic
+names.default.ultrabolditalic = names.default.extrabolditalic
+names.default.blackitalic = names.default.heavyitalic
+
+names.default.thinslanted = names.default.thinitalic
+names.default.extralightslanted = names.default.extralightitalic
+names.default.ultralightslanted = names.default.extralightitalic
+names.default.lightslanted = names.default.lightitalic
+names.default.slanted = names.default.italic
+names.default.demiboldslanted = names.default.demibolditalic
+names.default.semiboldslanted = names.default.demibolditalic
+names.default.boldslanted = names.default.bolditalic
+names.default.extraboldslanted = names.default.extrabolditalic
+names.default.ultraboldslanted = names.default.extrabolditalic
+names.default.heavyslanted = names.default.heavyitalic
+names.default.blackslanted = names.default.heavyitalic
+
+names.default.smallcaps = names.default.regular
+
+local mathsettings = {
+ ["asanamath"] = {
+ extras = "asana-math",
+ goodies = {
+ ["tf"] = "asana-math",
+ },
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+ ["cambriamath"] = {
+ extras = "cambria-math",
+ goodies = {
+ ["tf"] = "cambria-math",
+ },
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+ ["neoeuler"] = {
+ extras = "euler-math",
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+ ["latinmodernmath"] = {
+ extras = "lm,lm-math",
+ goodies = {
+ ["tf"] = "lm",
+ },
+ features = {
+ ["tf"] = "math\\mathsizesuffix,lm-math",
+ },
+ },
+ ["lucidabrightmathot"] = {
+ extras = "lucida-opentype-math",
+ goodies = {
+ ["tf"] = "lucida-opentype-math",
+ },
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+ ["texgyrepagellamath"] = {
+ extras = "texgyre",
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+ ["texgyrebonummath"] = {
+ extras = "texgyre",
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+ ["texgyretermesmath"] = {
+ extras = "texgyre",
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+ ["xitsmath"] = {
+ extras = "xits-math",
+ goodies = {
+ ["tf"] = "xits-math",
+ },
+ features = {
+ ["tf"] = "math\\mathsizesuffix",
+ },
+ },
+}
+
+function commands.defineselectfont(settings)
+ local index = #data + 1
+ data[index] = settings
+ selectfont.searchfiles(index)
+ selectfont.filterinput(index)
+ context(index)
+end
+
+local function savefont(data,alternative,entries)
+ local f = data.fonts
+ if not f then
+ f = { }
+ data.fonts = f
+ end
+ f[alternative] = entries
+end
+
+local function savefeatures(data,alternative,entries)
+ local e = gsub(entries,"{(.*)}","%1")
+ local f = data.features
+ if not f then
+ f = { }
+ data.features = f
+ end
+ f[alternative] = e
+end
+
+local function savegoodies(data,alternative,entries)
+ local e = gsub(entries,"{(.*)}","%1")
+ local g = data.goodies
+ if not g then
+ g = { }
+ data.goodies = g
+ end
+ g[alternative] = e
+end
+
+methods[v_simplefonts] = function(data,alternative,style)
+ local family = data.metadata.family
+ local names = names["simplefonts"][style] or names["simplefonts"]["regular"]
+ for _, name in next, names do
+ local filename = cleanname(formatters["%s%s"](family,name))
+ local fullname = getlookups{ fullname = filename }
+ local fontname = getlookups{ fontname = filename }
+ local cleanfilename = getlookups{ cleanfilename = filename }
+ if #fullname > 0 then
+ savefont(data,alternative,fullname)
+ break
+ elseif #fontname > 0 then
+ savefont(data,alternative,fontname)
+ break
+ elseif #cleanfilename > 0 then
+ savefont(data,alternative,cleanfilename)
+ break
+ end
+ end
+end
+
+methods[v_default] = function(data,alternative,style)
+ local family = data.metadata.family
+ local spec = names["default"][style] or names["default"]["regular"]
+ local weights = spec["weight"]
+ for _, weight in next, weights do
+ local pattern = getlookups{
+ familyname = cleanname(family),
+ pfmweight = weight,
+ pfmwidth = spec["width"],
+ }
+ if #pattern > 0 then
+ local fontfiles = { }
+ for _, fontfile in next, pattern do
+ if (fontfile["angle"] and spec["italic"] == true) or (not fontfile["angle"] and spec["italic"] == false) then
+ fontfiles[#fontfiles + 1] = fontfile
+ end
+ end
+ savefont(data,alternative,fontfiles)
+ break
+ end
+ end
+end
+
+methods[v_selectfont] = function(data,alternative,style)
+ local family = data.metadata.family
+ local spec = names["selectfont"][style] or names["selectfont"]["regular"]
+ local pattern = getlookups{
+ familyname = cleanname(family),
+ weight = spec["weight"],
+ style = spec["style"],
+ width = spec["width"],
+ variant = spec["variant"]
+ }
+ if #pattern > 0 then
+ local fontfiles = { }
+ for _, fontfile in next, pattern do
+ if (fontfile["angle"] and spec["italic"] == true) or (not fontfile["angle"] and spec["italic"] == false) then
+ fontfiles[#fontfiles + 1] = fontfile
+ end
+ end
+ savefont(data,alternative,fontfiles)
+ end
+end
+
+methods["name"] = function(data,alternative,filename)
+ local data = data
+ local family = data.metadata.family
+ local filename = cleanname(gsub(filename,"*",family))
+ local fullname = getlookups{ fullname = filename }
+ local fontname = getlookups{ fontname = filename }
+ if #fullname > 0 then
+ savefont(data,alternative,fullname)
+ elseif #fontname > 0 then
+ savefont(data,alternative,fontname)
+ end
+end
+
+methods["file"] = function(data,alternative,filename)
+ local data = data
+ local family = data.metadata.family
+ local filename = gsub(removesuffix(filename),"*",family)
+ local filename = getlookups{ cleanfilename = cleanname(filename) }
+ if #filename > 0 then
+ savefont(data,alternative,filename)
+ end
+end
+
+methods["spec"] = function(data,alternative,filename)
+ local family = data.metadata.family
+ local weight, style, width, variant = splitup(filename,"-")
+ local pattern = getlookups{
+ familyname = cleanname(family),
+ weight = weight or "normal",
+ style = style or "normal",
+ width = width or "normal",
+ variant = variant or "normal",
+ }
+ if #pattern > 0 then
+ savefont(data,alternative,pattern)
+ end
+end
+
+methods["style"] = function(data,alternative,style)
+ local method = data.options.alternative or nil
+ (methods[method] or methods[v_default])(data,alternative,style)
+end
+
+methods["features"] = function(data,alternative,features)
+ savefeatures(data,alternative,features)
+end
+
+methods["goodies"] = function(data,alternative,goodies)
+ savegoodies(data,alternative,goodies)
+end
+
+function selectfont.searchfiles(index)
+ local data = data[index]
+ for alternative, _ in next, alternatives do
+ local filename = data.files[alternative]
+ local method = data.options.alternative
+ local family = data.metadata.family
+ local style = alternatives[alternative]
+ if filename == "" then
+ local pattern = getlookups{ familyname = cleanname(family) }
+ if #pattern == 1 and alternative == "tf" then -- needs to be improved
+ savefont(data,alternative,pattern)
+ else
+ (methods[method] or methods[v_default])(data,alternative,style)
+ end
+ else
+ method, filename = splitup(filename,":")
+ if not filename then
+ filename = method
+ method = "name"
+ end
+ (methods[method] or methods["name"])(data,alternative,filename)
+ end
+ end
+end
+
+function selectfont.filterinput(index)
+ local data = data[index]
+ local p = splitat(":",true)
+ for alternative, _ in next, alternatives do
+ local list = settings_to_array(data.alternatives[alternative])
+ for _, entry in next, list do
+ method, entries = lpegmatch(p,entry)
+ if not entries then
+ entries = method
+ method = "name"
+ end
+ (methods[method] or methods["name"])(data,alternative,entries)
+ end
+ end
+end
+
+local function definefontsynonym(data,alternative,index,fallback)
+ local fontdata = data.fonts and data.fonts[alternative]
+ local style = data.metadata.style
+ local typeface = data.metadata.typeface
+ local mathsettings = mathsettings[cleanname(data.metadata.family)]
+ local features = mathsettings and mathsettings["features"] and (mathsettings["features"][alternative] or mathsettings["features"]["tf"]) or data.features and data.features[alternative] or ""
+ local goodies = mathsettings and mathsettings["goodies"] and (mathsettings["goodies"] [alternative] or mathsettings["goodies"] ["tf"]) or data.goodies and data.goodies [alternative] or ""
+ local parent = replacement["style"][alternative] or ""
+ local fontname, fontfile, fontparent
+ if fallback then
+ fontname = formatters["%s-%s-%s-fallback-%s"](typeface, style, alternative, index)
+ fontfile = formatters["%s-%s-%s-%s"] (typeface, style, alternative, index)
+ fontparent = formatters["%s-%s-%s-fallback-%s"](typeface, style, parent, index)
+ else
+ fontname = synonyms[style][alternative]
+ fontfile = formatters["%s-%s-%s"](typeface, style, alternative)
+ fontparent = formatters["%s-%s-%s"](typeface, style, parent)
+ end
+ if fontdata and #fontdata > 0 then
+ for _, size in next, sizes["default"] do
+ for _, entry in next, fontdata do
+ if entry["minsize"] and entry["maxsize"] then
+ if size[1] > entry["minsize"] and size[1] <= entry["maxsize"] then
+ local filepath, filename = splitbase(entry["filename"])
+ registerdesignsizes( fontfile, size[2], filename )
+ end
+ end
+ end
+ end
+ for _, entry in next, fontdata do
+ local designsize = entry["designsize"] or 100
+ if designsize == 100 or designsize == 110 or designsize == 120 or designsize == 0 or #fontdata == 1 then
+ local filepath, filename = splitbase(entry["filename"])
+ registerdesignsizes( fontfile, "default", filename )
+ break
+ end
+ end
+ if fallback then
+ context.definefontsynonym( { fontname }, { fontfile }, { features = features } )
+ else
+ context.definefontsynonym( { fontname }, { fontfile }, { features = features, fallbacks = fontfile, goodies = goodies } )
+ end
+ else
+ if fallback then
+ context.definefontsynonym( { fontname }, { fontparent }, { features = features } )
+ else
+ context.definefontsynonym( { fontname }, { fontparent }, { features = features, fallbacks = fontfile, goodies = goodies } )
+ end
+ end
+end
+
+local function definetypescript(index)
+ local data = data[index]
+ local entry = data.fonts
+ local mathsettings = mathsettings[cleanname(data.metadata.family)]
+ local goodies = mathsettings and mathsettings.extras or data.options.goodies
+ local typeface = data.metadata.typeface
+ local style = data.metadata.style
+ if entry and entry["tf"] then
+ context.startfontclass( { typeface } )
+ if goodies ~= "" then
+ goodies = utilities.parsers.settings_to_array(goodies)
+ for _, goodie in next, goodies do
+ context.loadfontgoodies( { goodie } )
+ end
+ end
+ for alternative, _ in next, alternatives do
+ if synonyms[style][alternative] then -- prevent unnecessary synonyms for handwriting, calligraphy and math
+ definefontsynonym(data,alternative)
+ end
+ end
+ context.stopfontclass()
+ else
+ -- regular style not available, loading aborted
+ end
+end
+
+function selectfont.registerfallback(typeface,style,index)
+ local t = fallbacks[typeface]
+ if not t then
+ fallbacks[typeface] = { [style] = { index } }
+ else
+ local s = t[style]
+ if not s then
+ fallbacks[typeface][style] = { index }
+ else
+ fallbacks[typeface][style][#s+1] = index
+ end
+ end
+end
+
+local function definetextfontfallback(data,alternative,index)
+ local typeface = data.metadata.typeface
+ local style = data.metadata.style
+ local features = data.features[alternative]
+ local range = data.options.range
+ local rscale = data.options.scale ~= "" and data.options.scale or 1
+ local check = data.options.check ~= "" and data.options.check or "yes"
+ local force = data.options.force ~= "" and data.options.force or "yes"
+ local synonym = formatters["%s-%s-%s-fallback-%s"](typeface, style, alternative, index)
+ local fallback = formatters["%s-%s-%s"] (typeface, style, alternative)
+ if index == 1 then
+ context.resetfontfallback( { fallback } )
+ end
+ context.definefontfallback( { fallback }, { synonym }, { range }, { rscale = rscale, check = check, force = force } )
+end
+
+local function definetextfallback(entry,index)
+ local data = data[index]
+ local typeface = data.metadata.typeface
+ context.startfontclass( { typeface } )
+ for alternative, _ in next, alternatives do
+ definefontsynonym (data,alternative,entry,true)
+ definetextfontfallback(data,alternative,entry)
+ end
+ context.stopfontclass()
+ -- inspect(data)
+end
+
+local function definemathfontfallback(data,alternative,index)
+ local typeface = data.metadata.typeface
+ local style = data.metadata.style
+ local range = data.options.range
+ local rscale = data.options.scale ~= "" and data.options.scale or 1
+ local check = data.options.check ~= "" and data.options.check or "yes"
+ local force = data.options.force ~= "" and data.options.force or "yes"
+ local offset = data.options.offset
+ local features = data.features[alternative]
+ local fontdata = data.fonts and data.fonts[alternative]
+ local fallback = formatters["%s-%s-%s"](typeface, style, alternative)
+ if index == 1 then
+ context.resetfontfallback( { fallback } )
+ end
+ if fontdata and #fontdata > 0 then
+ for _, entry in next, fontdata do
+ local filename = entry["filename"]
+ local designsize = entry["designsize"] or 100
+ if designsize == 100 or designsize == 110 or designsize == 120 or designsize == 0 or #fontdata == 1 then
+ context.definefontfallback( { fallback }, { formatters["file:%s*%s"](filename,features) }, { range }, { rscale = rscale, check = check, force = force, offset = offset } )
+ break
+ end
+ end
+ end
+end
+
+local function definemathfallback(entry,index)
+ local data = data[index]
+ local typeface = data.metadata.typeface
+ local style = data.metadata.style
+ context.startfontclass( { typeface } )
+ for alternative, _ in next, alternatives do
+ if synonyms[style][alternative] then
+ definemathfontfallback(data,alternative,entry)
+ end
+ end
+ context.stopfontclass()
+ -- inspect(data)
+end
+
+local function definefallbackfont(index)
+ local data = data[index]
+ local f = fallbacks[data.metadata.typeface]
+ if f then
+ local s = f[data.metadata.style]
+ if s then
+ for entry, fallback in next, s do
+ if data.metadata.style == "mm" then
+ definemathfallback(entry,fallback)
+ else
+ definetextfallback(entry,fallback)
+ end
+ end
+ end
+ end
+end
+
+local function definetextfont(index)
+ local data = data[index]
+ local fontclass = data.metadata.typeface
+ local shortstyle = data.metadata.style
+ local style = styles[data.metadata.style]
+ local designsize = data.options.opticals == v_yes and "auto" or "default"
+ local scale = data.options.scale ~= "" and data.options.scale or 1
+ context.definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { designsize = designsize, rscale = scale } )
+end
+
+local function definemathfont(index)
+ local data = data[index]
+ local fontclass = data.metadata.typeface
+ local shortstyle = data.metadata.style
+ local style = styles[data.metadata.style]
+ local scale = data.options.scale ~= "" and data.options.scale or 1
+ local typescript = cleanname(data.metadata.family)
+ local entries = data.fonts
+ if entries then
+ context.definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { rscale = scale } )
+ else
+ context.definetypeface( { fontclass }, { shortstyle }, { style }, { typescript }, { "default" }, { rscale = scale } )
+ end
+end
+
+function selectfont.definetypeface(index)
+ local data = data[index]
+ if data.metadata.style == "mm" then
+ definefallbackfont(index)
+ definetypescript (index)
+ definemathfont (index)
+ else
+ definefallbackfont(index)
+ definetypescript (index)
+ definetextfont (index)
+ end
+ -- inspect(data)
+end
+
+commands.definefontfamily = selectfont.definetypeface
+commands.definefallbackfamily = selectfont.registerfallback
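
The default method in the new font-sel.lua resolves an alternative by walking the weight priority list for the requested style and querying the names database with each pfmweight until something matches. A standalone sketch of that loop (the database and getlookups below are stand-ins for fonts.names.getlookups, purely illustrative):

    local database = { -- invented entries, stand-in for the names database
        { familyname = "demofamily", pfmweight = 400, pfmwidth = 5 },
        { familyname = "demofamily", pfmweight = 600, pfmwidth = 5 },
    }

    local function getlookups(query) -- stand-in for fonts.names.getlookups
        local found = { }
        for i=1,#database do
            local entry = database[i]
            if entry.familyname == query.familyname and
               entry.pfmweight  == query.pfmweight  and
               entry.pfmwidth   == query.pfmwidth   then
                found[#found+1] = entry
            end
        end
        return found
    end

    local bold = { weight = { 700, 600, 800, 900 }, width = 5 } -- as in names.default.bold

    for _, weight in ipairs(bold.weight) do
        local pattern = getlookups { familyname = "demofamily", pfmweight = weight, pfmwidth = bold.width }
        if #pattern > 0 then
            print("selected pfmweight", weight) -- 600: the first candidate that exists
            break
        end
    end
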
diff --git a/Master/texmf-dist/tex/context/base/font-sel.mkvi b/Master/texmf-dist/tex/context/base/font-sel.mkvi
new file mode 100644
index 00000000000..0b1d10c51c1
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/font-sel.mkvi
@@ -0,0 +1,374 @@
+%D \module
+%D [ file=font-sel,
+%D version=2014.03.10,
+%D title=\CONTEXT\ User Module,
+%D subtitle=Selectfont,
+%D author=Wolfgang Schuster,
+%D date=\currentdate,
+%D copyright=Wolfgang Schuster,
+%D license=GNU General Public License]
+
+\writestatus{loading}{ConTeXt User Module / Selectfont}
+
+\registerctxluafile{font-sel}{1.000}
+
+\unprotect
+
+\installcorenamespace {selectfont}
+\installsimplecommandhandler \??selectfont {selectfont}
+
+\unexpanded\def\selectfont_setparameters[#settings]%
+ {\begingroup
+ \setupcurrentselectfont[#settings]%
+ \edef\p_selectfont_preset{\selectfontparameter\c!preset}%
+ \ifx\p_selectfont_preset\empty \else
+ \processcommacommand[\p_selectfont_preset]\selectfont_preset_process
+ \setupcurrentselectfont[#settings]%
+ \fi
+ \setexpandedselectfontparameter\c!style {\expandnamespaceparameter\??selectfontstyle \selectfontparameter\c!style \s!rm }%
+ \setexpandedselectfontparameter\c!alternative{\expandnamespaceparameter\??selectfontalternative\selectfontparameter\c!alternative\v!default}%
+ \xdef\selectfont_index{\ctxcommand{
+ defineselectfont {
+ metadata = {
+ typeface = "\selectfontparameter\c!name",
+ style = "\selectfontparameter\c!style",
+ family = "\selectfontparameter\c!family",
+ },
+ options = {
+ opticals = "\selectfontparameter\c!opticalsize",
+ scale = "\selectfontparameter\c!scale",
+ goodies = "\selectfontparameter\c!goodies",
+ alternative = "\selectfontparameter\c!alternative",
+ range = "\selectfontparameter\c!range", % fallback only
+ offset = "\selectfontparameter\c!offset", % fallback only
+ check = "\selectfontparameter\c!check", % fallback only
+ force = "\selectfontparameter\c!force", % fallback only
+ },
+ alternatives = {
+ ["tf"] = "\selectfontparameter\s!tf",
+ ["bf"] = "\selectfontparameter\s!bf",
+ ["it"] = "\selectfontparameter\s!it",
+ ["sl"] = "\selectfontparameter\s!sl",
+ ["bi"] = "\selectfontparameter\s!bi",
+ ["bs"] = "\selectfontparameter\s!bs",
+ ["sc"] = "\selectfontparameter\s!sc",
+ },
+ files = {
+ ["tf"] = "\selectfontparameter\c!regularfont",
+ ["bf"] = "\selectfontparameter\c!boldfont",
+ ["it"] = "\selectfontparameter\c!italicfont",
+ ["sl"] = "\selectfontparameter\c!slantedfont",
+ ["bi"] = "\selectfontparameter\c!bolditalicfont",
+ ["bs"] = "\selectfontparameter\c!boldslantedfont",
+ ["sc"] = "\selectfontparameter\c!smallcapsfont",
+ },
+ features = {
+ ["tf"] = "\selectfontparameter\c!regularfeatures",
+ ["bf"] = "\selectfontparameter\c!boldfeatures",
+ ["it"] = "\selectfontparameter\c!italicfeatures",
+ ["sl"] = "\selectfontparameter\c!slantedfeatures",
+ ["bi"] = "\selectfontparameter\c!bolditalicfeatures",
+ ["bs"] = "\selectfontparameter\c!boldslantedfeatures",
+ ["sc"] = "\selectfontparameter\c!smallcapsfeatures",
+ }
+ }}}%
+ \endgroup}
+
+%D \macros
+%D {defineselectfontstyle}
+
+\installcorenamespace {selectfontstyle}
+
+\unexpanded\def\defineselectfontstyle
+ {\dodoubleargument\selectfont_style_define}
+
+\def\selectfont_style_define[#styles][#shortstyle]%
+ {\processcommalist[#styles]{\selectfont_style_define_indeed{#shortstyle}}}
+
+\def\selectfont_style_define_indeed#shortstyle#style%
+ {\setvalue{\??selectfontstyle#style}{#shortstyle}}
+
+\defineselectfontstyle [\s!rm,\s!serif] [\s!rm]
+\defineselectfontstyle [\s!ss,\s!sans] [\s!ss]
+\defineselectfontstyle [\s!tt,\s!mono] [\s!tt]
+\defineselectfontstyle [\s!hw,\s!handwriting] [\s!hw]
+\defineselectfontstyle [\s!cg,\s!calligraphy] [\s!cg]
+\defineselectfontstyle [\s!mm,\s!math] [\s!mm]
+
+%D \macros
+%D {definefontfamilypreset}
+
+\installcorenamespace {selectfontpreset}
+
+\unexpanded\def\definefontfamilypreset
+ {\dodoubleargument\selectfont_preset_define}
+
+\def\selectfont_preset_define[#name][#settings]%
+ {\doifassignmentelse{#settings}
+ {\setvalue{\??selectfontpreset#name}{\setupcurrentselectfont[#settings]}}
+ {\setvalue{\??selectfontpreset#name}{\csname\??selectfontpreset#settings\endcsname}}}
+
+\def\selectfont_preset_process#name%
+ {\ifcsname\??selectfontpreset#name\endcsname
+ \csname\??selectfontpreset#name\endcsname
+ \else
+ % unknown preset
+ \fi}
+
+%definefontfamilypreset [range:chinese] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,bopomofo,bopomofoextended}]
+%definefontfamilypreset [range:japanese] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,hiragana,katakana}]
+%definefontfamilypreset [range:korean] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,hangulcompatibilityjamo,hanguljamo,hanguljamoextendeda,hanguljamoextendedb,hangulsyllables}]
+%definefontfamilypreset [range:cyrillic] [\c!range={cyrillic,cyrillicextendeda,cyrillicextendedb,cyrillicsupplement}]
+%definefontfamilypreset [range:greek] [\c!range={greekandcoptic,greekextended,ancientgreeknumbers}]
+
+\definefontfamilypreset [range:chinese] [\c!range={0x02E80-0x02EFF,0x03000-0x031EF,0x03300-0x09FFF,0x0F900-0x0FFEF,0x20000-0x2A6DF,0x2F800-0x2FA1F,0x03100-0x0312F,0x031A0-0x031BF}]
+\definefontfamilypreset [range:japanese] [\c!range={0x02E80-0x02EFF,0x03000-0x031EF,0x03300-0x09FFF,0x0F900-0x0FFEF,0x20000-0x2A6DF,0x2F800-0x2FA1F,0x03040-0x0309F,0x030A0-0x030FF}]
+\definefontfamilypreset [range:korean] [\c!range={0x02E80-0x02EFF,0x03000-0x031EF,0x03300-0x09FFF,0x0F900-0x0FFEF,0x20000-0x2A6DF,0x2F800-0x2FA1F,0x01100-0x011FF,0x03130-0x0318F,0x0A960-0x0D7FF}]
+\definefontfamilypreset [range:cyrillic] [\c!range={0x00400-0x0052F,0x02DE0-0x02DFF,0x0A640-0x0A69F}]
+\definefontfamilypreset [range:greek] [\c!range={0x00370-0x003FF,0x01F00-0x01FFF,0x10140-0x1018F}]
+\definefontfamilypreset [range:hebrew] [\c!range={0x00590-0x005FF,0x0FB00-0x0FB4F}]
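+
+% A preset is selected by passing its name as the (non-assignment) fourth
+% argument, for instance for a fallback family; the font name below is only
+% an example:
+%
+% \definefallbackfamily [mainface] [serif] [Noto Serif CJK SC] [range:chinese]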
+
+\definefontfamilypreset [math:digitsnormal] [\c!range=digitsnormal]
+\definefontfamilypreset [math:digitsbold] [\c!range=digitsnormal,\c!offset=digitsbold,\s!tf=style:bold]
+
+\definefontfamilypreset [math:uppercasenormal] [\c!range=uppercasenormal]
+\definefontfamilypreset [math:uppercaseitalic] [\c!range=uppercasenormal,\c!offset=uppercaseitalic, \s!tf=style:italic]
+\definefontfamilypreset [math:uppercasebold] [\c!range=uppercasenormal,\c!offset=uppercasebold, \s!tf=style:bold]
+\definefontfamilypreset [math:uppercasebolditalic] [\c!range=uppercasenormal,\c!offset=uppercasebolditalic,\s!tf=style:bolditalic]
+
+\definefontfamilypreset [math:lowercasenormal] [\c!range=lowercasenormal]
+\definefontfamilypreset [math:lowercaseitalic] [\c!range=lowercasenormal,\c!offset=lowercaseitalic, \s!tf=style:italic]
+\definefontfamilypreset [math:lowercasebold] [\c!range=lowercasenormal,\c!offset=lowercasebold, \s!tf=style:bold]
+\definefontfamilypreset [math:lowercasebolditalic] [\c!range=lowercasenormal,\c!offset=lowercasebolditalic,\s!tf=style:bolditalic]
+
+\definefontfamilypreset [math:mathematicaloperators] [\c!range=mathematicaloperators]
+
+\definefontfamilypreset [math:lowercasegreeknormal] [\c!range=lowercasegreeknormal]
+\definefontfamilypreset [math:lowercasegreekitalic] [\c!range=lowercasegreeknormal,\c!offset=lowercasegreekitalic, \s!tf=style:italic]
+\definefontfamilypreset [math:lowercasegreekbold] [\c!range=lowercasegreeknormal,\c!offset=lowercasegreekbold, \s!tf=style:bold]
+\definefontfamilypreset [math:lowercasegreekbolditalic] [\c!range=lowercasegreeknormal,\c!offset=lowercasegreekbolditalic,\s!tf=style:bolditalic]
+
+\definefontfamilypreset [math:uppercasegreeknormal] [\c!range=uppercasegreeknormal]
+\definefontfamilypreset [math:uppercasegreekitalic] [\c!range=uppercasegreeknormal,\c!offset=uppercasegreekitalic, \s!tf=style:italic]
+\definefontfamilypreset [math:uppercasegreekbold] [\c!range=uppercasegreeknormal,\c!offset=uppercasegreekbold, \s!tf=style:bold]
+\definefontfamilypreset [math:uppercasegreekbolditalic] [\c!range=uppercasegreeknormal,\c!offset=uppercasegreekbolditalic,\s!tf=style:bolditalic]
+
+
+%D \macros
+%D {defineselectfontalternative}
+%D
+%D The results of the old {\em simplefonts} module and the new {\em selectfont}
+%D mechanism can differ because simplefonts uses the name entries in the font
+%D database to find the styles of a font while selectfont uses the newer
+%D spec-method to find the files for each style.
+%D
+%D Which method is used depends on the command one uses to load a font, but it
+%D is also possible to switch between them with the {\em alternative} key (see
+%D the example after this list); the possible values are:
+%D
+%D \startitemize[packed]
+%D \startitem selectfont and \stopitem
+%D \startitem simplefonts. \stopitem
+%D \stopitemize
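+%D
+%D A minimal sketch (the font name is only an example):
+%D
+%D \starttyping
+%D \definefontfamily [mainface] [serif] [TeX Gyre Pagella] [alternative=simplefonts]
+%D \stoptyping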
+
+\installcorenamespace {selectfontalternative}
+
+\unexpanded\def\defineselectfontalternative
+ {\dodoubleargument\selectfont_alternative_define}
+
+\def\selectfont_alternative_define[#name][#alternative]%
+ {\setvalue{\??selectfontalternative#name}{#alternative}}
+
+\defineselectfontalternative [\v!selectfont ] [\v!selectfont ]
+\defineselectfontalternative [\v!simplefonts] [\v!simplefonts]
+\defineselectfontalternative [\v!default ] [\v!default ]
+
+%D \macros
+%D {definefontfamily,definefallbackfamily}
+%D
+%D The \tex{definefontfamily} command creates, like \tex{definetypeface}, a
+%D collection of fonts with different styles which can later be activated with
+%D the \tex{setupbodyfont} command.
+%D
+%D The command takes three mandatory arguments: (a) the name of the fontclass,
+%D (b) the style of the font and (c) the name of the font; an optional fourth
+%D argument can contain settings.
+%D
+%D \starttyping
+%D \definefontfamily [dejavu] [serif] [DejaVu Serif]
+%D \definefontfamily [dejavu] [sans] [DejaVu Sans]
+%D \definefontfamily [dejavu] [mono] [DejaVu Sans Mono]
+%D \definefontfamily [dejavu] [math] [XITS Math] [scale=1.1]
+%D
+%D \definefontfamily [office] [serif] [Times New Roman]
+%D \definefontfamily [office] [sans] [Arial] [scale=0.9]
+%D \definefontfamily [office] [mono] [Courier]
+%D \definefontfamily [office] [math] [TeX Gyre Termes Math]
+%D
+%D \definefontfamily [linux] [serif] [Linux Libertine O]
+%D \definefontfamily [linux] [sans] [Linux Biolinum O]
+%D \definefontfamily [linux] [mono] [Latin Modern Mono]
+%D \definefontfamily [linux] [math] [TeX Gyre Pagella Math] [scale=0.9]
+%D
+%D \setupbodyfont[dejavu]
+%D
+%D \starttext
+%D
+%D \rm Serif \ss Sans \tt Mono \m{1+2=3}
+%D
+%D \switchtobodyfont[office]
+%D
+%D \rm Serif 123 \ss Sans \tt Mono \m{1+2=3}
+%D
+%D \switchtobodyfont[linux]
+%D
+%D \rm Serif 123 \ss Sans \tt Mono \m{1+2=3}
+%D
+%D \stoptext
+%D \stoptyping
+%D
+%D When a document contains different languages and the main font lacks some
+%D characters for one of them, one can set a different font from which these
+%D characters are taken. Such a fallback font (there can be more than one for a
+%D given style) is set with the \tex{definefallbackfamily} command, which takes
+%D the same arguments as the \tex{definefontfamily} command.
+%D
+%D \starttyping
+%D \definefallbackfamily [mainface] [serif] [DejaVu Serif] [range=cyrillic]
+%D \definefontfamily [mainface] [serif] [TeX Gyre Pagella]
+%D
+%D \setupbodyfont[mainface]
+%D
+%D \setuplanguage[en][patterns={us,ru}]
+%D
+%D \starttext
+%D
+%D \input knuth
+%D
+%D Традиционная систематика лишайников оказывается во многом условна и
+%D
+%D \stoptext
+%D \stoptyping
+%D
+%D Another feature of the module is the \type{opticalsize} key, which enables
+%D optical sizes when the requested font provides them.
+%D
+%D \starttyping
+%D \definefontfamily[mainface][serif][Latin Modern Roman][opticalsize=yes]
+%D
+%D \setupbodyfont[mainface]
+%D
+%D \starttext
+%D \scale[width=\textwidth]{\switchtobodyfont [6pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic}
+%D \scale[width=\textwidth]{\switchtobodyfont [8pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic}
+%D \scale[width=\textwidth]{\switchtobodyfont [10pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic}
+%D \scale[width=\textwidth]{\switchtobodyfont [12pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic}
+%D \scale[width=\textwidth]{\switchtobodyfont[17.3pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic}
+%D \stoptext
+%D \stoptyping
+
+% regularfont = … | * … | name:… | name:* … | file:… | file:* … | spec:…-…-… | style:medium
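+%
+% A sketch of explicit file-based loading with the keys listed above (the file
+% names are assumptions and depend on the fonts that are installed):
+%
+% \definefontfamily
+%   [mainface] [serif] [texgyrepagella]
+%   [regularfont=file:texgyrepagella-regular.otf,
+%    boldfont=file:texgyrepagella-bold.otf,
+%    italicfont=file:texgyrepagella-italic.otf,
+%    bolditalicfont=file:texgyrepagella-bolditalic.otf]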
+
+\unexpanded\def\definefontfamily
+ {\doquadrupleempty\selectfont_family_define}
+
+\def\selectfont_family_define[#typeface][#style][#family][#settings]%
+ {\doifassignmentelse{#settings}
+ {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},#settings]}
+ {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},\c!preset={#settings}]}%
+ \ctxcommand{definefontfamily(\selectfont_index)}}
+
+\unexpanded\def\definefallbackfamily
+ {\doquadrupleempty\selectfont_fallback_define}
+
+\def\selectfont_fallback_define[#typeface][#style][#family][#settings]%
+ {\doifassignmentelse{#settings}
+ {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},#settings]}
+ {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},\c!preset={#settings}]}%
+ \edef\p_selectfont_style{\expandnamespacevalue\??selectfontstyle{#style}\s!rm}%
+ \ctxcommand{definefallbackfamily("#typeface","\p_selectfont_style",\selectfont_index)}}
+
+%D \macros
+%D {setupfontfamily,setupfallbackfamily}
+%D
+%D For simple documents which don't need complex font settings one can use the
+%D \tex{setupfontfamily} command; the requested font is then enabled immediately,
+%D without the need to load it with \tex{setupbodyfont}. The downside of this
+%D method is that processing the document takes longer with each additional font
+%D that is set with \tex{setupfontfamily}.
+%D
+%D \starttyping
+%D \setupfontfamily [serif] [DejaVu Serif]
+%D \setupfontfamily [sans] [DejaVu Sans]
+%D \setupfontfamily [mono] [DejaVu Sans Mono]
+%D \setupfontfamily [math] [XITS Math] [scale=1.1]
+%D
+%D \starttext
+%D
+%D \rm Serif 123 \ss Sans \tt Mono \m{1+2=3}
+%D
+%D \stoptext
+%D \stoptyping
+
+\newcount\c_selectfont_family
+\newtoks \t_selectfont_fallback
+\newtoks \t_selectfont_styles
+
+\unexpanded\def\setupfontfamily
+ {\dotripleempty\selectfont_family_setup}
+
+\def\selectfont_family_setup
+ {\ifsecondargument
+ \expandafter\selectfont_family_setup_yes
+ \else
+ \expandafter\selectfont_family_setup_nop
+ \fi}
+
+\def\selectfont_family_setup_yes[#style][#family][#settings]%
+ {\normalexpanded{\t_selectfont_styles{\selectfont_set_font_family[#style][#family][#settings]\the\t_selectfont_styles}}%
+ \selectfont_set_font_indeed}
+
+\def\selectfont_family_setup_nop[#settings][#dummya][#dummyb]%
+ {\setupselectfont[#settings]}
+
+\unexpanded\def\selectfont_set_default
+ {\selectfont_set_font_family[\v!serif][Latin Modern Roman][\c!opticalsize=\v!yes]%
+ \selectfont_set_font_family[\v!sans] [Latin Modern Sans] [\c!opticalsize=\v!yes]%
+ \selectfont_set_font_family[\v!mono] [Latin Modern Mono] [\c!opticalsize=\v!yes,\c!features=\s!none]}
+
+\unexpanded\def\setupfallbackfamily
+ {\dotripleempty\selectfont_fallback_setup}
+
+\def\selectfont_fallback_setup[#style][#family][#settings]%
+ {\normalexpanded{\t_selectfont_fallback{\the\t_selectfont_fallback\selectfont_set_font_fallback[#style][#family][#settings]}}}
+
+\def\selectfont_set_font_indeed
+ {\global\advance\c_selectfont_family\plusone
+ \edef\m_selectfont_typeface{\v!selectfont-\number\c_selectfont_family}%
+ \the\t_selectfont_fallback
+ \the\t_selectfont_styles
+ \selectfont_set_default
+ \setupbodyfont[\m_selectfont_typeface,\rootselectfontparameter\c!style]}
+
+\unexpanded\def\selectfont_set_font_family[#style]#dummy[#family]#dummy[#settings]%
+ {\ifcsname\m_selectfont_typeface#style\endcsname \else
+ \expandafter\let\csname\m_selectfont_typeface#style\endcsname\relax
+ \selectfont_family_define[\m_selectfont_typeface][#style][#family][#settings]%
+ \fi}
+
+\unexpanded\def\selectfont_set_font_fallback[#style]#dummy[#family]#dummy[#settings]%
+ {\selectfont_fallback_define[\m_selectfont_typeface][#style][#family][#settings]}
+
+%D You can apply a different feature set to each style of a font but if nothing
+%D is set the global features are used.
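+%D
+%D A possible application (the feature sets used here are examples and have to
+%D exist, e.g. via \tex{definefontfeature}):
+%D
+%D \starttyping
+%D \definefontfamily
+%D   [mainface] [serif] [TeX Gyre Pagella]
+%D   [features=default,
+%D    smallcapsfeatures=smallcaps,
+%D    boldfeatures=default]
+%D \stoptyping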
+
+\setupselectfont
+ [ \c!features=\s!default,
+ \c!regularfeatures=\selectfontparameter\c!features,
+ \c!boldfeatures=\selectfontparameter\c!features,
+ \c!italicfeatures=\selectfontparameter\c!features,
+ \c!slantedfeatures=\selectfontparameter\c!features,
+ \c!bolditalicfeatures=\selectfontparameter\c!features,
+ \c!boldslantedfeatures=\selectfontparameter\c!features,
+ \c!smallcapsfeatures=\s!smallcaps,
+ \c!style=\s!rm]
+
+\protect
\ No newline at end of file
diff --git a/Master/texmf-dist/tex/context/base/font-set.mkvi b/Master/texmf-dist/tex/context/base/font-set.mkvi
index 0e2058c1888..f94d6c86e0c 100644
--- a/Master/texmf-dist/tex/context/base/font-set.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-set.mkvi
@@ -39,27 +39,36 @@
% \enablemode[lmmath]
+\let\m_font_fallback_name\empty
+
\def\font_preloads_reset_nullfont % this is needed because some macro packages (tikz) misuse \nullfont
   {\dorecurse\plusseven{\fontdimen\recurselevel\nullfont\zeropoint}% keep an eye on this as:
\ctxcommand{resetnullfont()}% in luatex 0.70 this will also do the previous
\globallet\font_preloads_reset_nullfont\relax}
+\def\font_preload_check_mode
+ {\doifmodeelse{lmmath}
+ {\def\m_font_fallback_name{modern-designsize-virtual}}% this will stay
+ {\def\m_font_fallback_name{modern-designsize}}% % this might become 'modern'
+ \glet\font_preload_check_mode\relax}
+
\def\font_preload_default_fonts
{\font_preloads_reset
- \doifmodeelse{lmmath}
- {\setupbodyfont[modern-designsize-virtual,\fontstyle,\fontbody]}% this will stay
- {\setupbodyfont[modern-designsize,\fontstyle,\fontbody]}% % this might become 'modern'
- \showmessage\m!fonts6{fallback modern \fontstyle\normalspace\normalizedbodyfontsize}}
+ \font_preload_check_mode
+ \setupbodyfont[\m_font_fallback_name,\fontstyle,\fontbody]%
+ \showmessage\m!fonts6{fallback \m_font_fallback_name\space \fontstyle\normalspace\normalizedbodyfontsize}}
\def\font_preload_default_fonts_mm
- {\writestatus\m!fonts{preloading latin modern fonts (math)}%
- \definetypeface[\fontclass][\s!mm][\s!math][modern][\s!default]%
- \showmessage\m!fonts6{fallback modern mm \normalizedbodyfontsize}}
+ {\font_preload_check_mode
+ \writestatus\m!fonts{preloading \m_font_fallback_name\space (math)}%
+ \definetypeface[\fontclass][\s!mm][\s!math][\m_font_fallback_name][\s!default]%
+ \showmessage\m!fonts6{fallback \m_font_fallback_name\space mm \normalizedbodyfontsize}}
\def\font_preload_default_fonts_tt
- {\writestatus\m!fonts{preloading latin modern fonts (mono)}%
- \definetypeface[\fontclass][\s!tt][\s!mono][modern][\s!default]%
- \showmessage\m!fonts6{fallback modern tt \normalizedbodyfontsize}}
+ {\font_preload_check_mode
+ \writestatus\m!fonts{preloading \m_font_fallback_name\space (mono)}%
+ \definetypeface[\fontclass][\s!tt][\s!mono][\m_font_fallback_name][\s!default]%
+ \showmessage\m!fonts6{fallback \m_font_fallback_name\space tt \normalizedbodyfontsize}}
\def\font_preloads_reset
{\glet\font_preload_default_fonts \relax
diff --git a/Master/texmf-dist/tex/context/base/font-sol.lua b/Master/texmf-dist/tex/context/base/font-sol.lua
index db2dd24c248..a41e4a67946 100644
--- a/Master/texmf-dist/tex/context/base/font-sol.lua
+++ b/Master/texmf-dist/tex/context/base/font-sol.lua
@@ -48,19 +48,41 @@ local v_split = variables.split
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local free_nodelist = node.flush_list
-local copy_nodelist = node.copy_list
-local traverse_nodes = node.traverse
-local traverse_ids = node.traverse_id
-local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
-local hpack_nodes = node.hpack
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local repack_hlist = nodes.repackhlist
+local tasks = nodes.tasks
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+
+local find_node_tail = nuts.tail
+local free_node = nuts.free
+local free_nodelist = nuts.flush_list
+local copy_nodelist = nuts.copy_list
+local traverse_nodes = nuts.traverse
+local traverse_ids = nuts.traverse_id
+local hpack_nodes = nuts.hpack
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local protect_glyphs = nuts.protect_glyphs
+
+local repack_hlist = nuts.repackhlist
+
local nodes_to_utf = nodes.listtoutf
+----- protect_glyphs = nodes.handlers.protectglyphs
+
local setnodecolor = nodes.tracers.colors.set
local nodecodes = nodes.nodecodes
@@ -79,8 +101,7 @@ local localpar_code = whatsitcodes.localpar
local dir_code = whatsitcodes.dir
local userdefined_code = whatsitcodes.userdefined
-local nodepool = nodes.pool
-local tasks = nodes.tasks
+local nodepool = nuts.pool
local usernodeids = nodepool.userids
local new_textdir = nodepool.textdir
@@ -90,7 +111,7 @@ local new_leftskip = nodepool.leftskip
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
-local process_characters = nodes.handlers.characters
+----- process_characters = nodes.handlers.characters
local inject_kerns = nodes.injections.handler
local fonthashes = fonts.hashes
@@ -317,11 +338,12 @@ end)
function splitters.split(head)
-- quite fast
+ head = tonut(head)
local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
cache, max_less, max_more = { }, 0, 0
local function flush() -- we can move this
- local font = start.font
- local last = stop.next
+ local font = getfont(start)
+ local last = getnext(stop)
local list = last and copy_nodelist(start,last) or copy_nodelist(start)
local n = #cache + 1
if encapsulate then
@@ -332,18 +354,18 @@ function splitters.split(head)
else
local current = start
while true do
- current[a_word] = n
+ setattr(current,a_word,n)
if current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
end
if rlmode == "TRT" or rlmode == "+TRT" then
local dirnode = new_textdir("+TRT")
- list.prev = dirnode
- dirnode.next = list
+ setfield(list,"prev",dirnode)
+ setfield(dirnode,"next",list)
list = dirnode
end
local c = {
@@ -364,11 +386,11 @@ function splitters.split(head)
start, stop, done = nil, nil, true
end
while current do -- also nextid
- local next = current.next
- local id = current.id
+ local next = getnext(current)
+ local id = getid(current)
if id == glyph_code then
- if current.subtype < 256 then
- local a = current[a_split]
+ if getsubtype(current) < 256 then
+ local a = getattr(current,a_split)
if not a then
start, stop = nil, nil
elseif not start then
@@ -384,7 +406,7 @@ function splitters.split(head)
if start then
flush()
end
- elseif start and next and next.id == glyph_code and next.subtype < 256 then
+ elseif start and next and getid(next) == glyph_code and getsubtype(next) < 256 then
-- beware: we can cross future lines
stop = next
else
@@ -394,9 +416,9 @@ function splitters.split(head)
if start then
flush()
end
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == dir_code or subtype == localpar_code then
- rlmode = current.dir
+ rlmode = getfield(current,"dir")
end
else
if start then
@@ -410,17 +432,17 @@ function splitters.split(head)
end
nofparagraphs = nofparagraphs + 1
nofwords = nofwords + #cache
- return head, done
+ return tonode(head), done
end
local function collect_words(list) -- can be made faster for attributes
local words, w, word = { }, 0, nil
if encapsulate then
for current in traverse_ids(whatsit_code,list) do
- if current.subtype == userdefined_code then -- hm
- local user_id = current.user_id
+ if getsubtype(current) == userdefined_code then -- hm
+ local user_id = getfield(current,"user_id")
if user_id == splitter_one then
- word = { current.value, current, current }
+ word = { getfield(current,"value"), current, current }
w = w + 1
words[w] = word
elseif user_id == splitter_two then
@@ -436,9 +458,9 @@ local function collect_words(list) -- can be made faster for attributes
local current, first, last, index = list, nil, nil, nil
while current do
-- todo: disc and kern
- local id = current.id
+ local id = getid(current)
if id == glyph_code or id == disc_code then
- local a = current[a_word]
+ local a = getattr(current,a_word)
if a then
if a == index then
-- same word
@@ -471,7 +493,7 @@ local function collect_words(list) -- can be made faster for attributes
report_splitters("skipped: %C",current.char)
end
end
- elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then
+ elseif id == kern_code and (getsubtype(current) == fontkern_code or getattr(current,a_fontkern)) then
if first then
last = current
else
@@ -489,7 +511,7 @@ local function collect_words(list) -- can be made faster for attributes
end
end
end
- current = current.next
+ current = getnext(current)
end
if index then
w = w + 1
@@ -520,8 +542,8 @@ local function doit(word,list,best,width,badness,line,set,listdir)
if found then
local h, t
if encapsulate then
- h = word[2].next -- head of current word
- t = word[3].prev -- tail of current word
+ h = getnext(word[2]) -- head of current word
+ t = getprev(word[3]) -- tail of current word
else
h = word[2]
t = word[3]
@@ -536,7 +558,7 @@ local function doit(word,list,best,width,badness,line,set,listdir)
ok = true
break
else
- c = c.next
+ c = getnext(c)
end
end
if not ok then
@@ -555,23 +577,24 @@ local function doit(word,list,best,width,badness,line,set,listdir)
local first = copy_nodelist(original)
if not trace_colors then
for n in traverse_nodes(first) do -- maybe fast force so no attr needed
- n[0] = featurenumber -- this forces dynamics
+ setattr(n,0,featurenumber) -- this forces dynamics
end
elseif set == "less" then
for n in traverse_nodes(first) do
setnodecolor(n,"font:isol") -- yellow
- n[0] = featurenumber
+ setattr(n,0,featurenumber)
end
else
for n in traverse_nodes(first) do
setnodecolor(n,"font:medi") -- green
- n[0] = featurenumber
+ setattr(n,0,featurenumber)
end
end
+first = tonode(first)
local font = found.font
local setdynamics = setfontdynamics[font]
if setdynamics then
- local processes = setdynamics(font,featurenumber)
+ local processes = setdynamics[featurenumber]
for i=1,#processes do -- often more than 1
first = processes[i](first,font,featurenumber)
end
@@ -579,20 +602,21 @@ local function doit(word,list,best,width,badness,line,set,listdir)
report_solutions("fatal error, no dynamics for font %a",font)
end
first = inject_kerns(first)
- if first.id == whatsit_code then
+first = tonut(first)
+ if getid(first) == whatsit_code then
local temp = first
- first = first.next
+ first = getnext(first)
free_node(temp)
end
local last = find_node_tail(first)
-- replace [u]h->t by [u]first->last
- local prev = h.prev
- local next = t.next
- prev.next = first
- first.prev = prev
+ local prev = getprev(h)
+ local next = getnext(t)
+ setfield(prev,"next",first)
+ setfield(first,"prev",prev)
if next then
- last.next = next
- next.prev = last
+ setfield(last,"next",next)
+ setfield(next,"prev",last)
end
-- check new pack
local temp, b = repack_hlist(list,width,'exactly',listdir)
@@ -601,22 +625,22 @@ local function doit(word,list,best,width,badness,line,set,listdir)
report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit")
end
-- remove last insert
- prev.next = h
- h.prev = prev
+ setfield(prev,"next",h)
+ setfield(h,"prev",prev)
if next then
- t.next = next
- next.prev = t
+ setfield(t,"next",next)
+ setfield(next,"prev",t)
else
- t.next = nil
+ setfield(t,"next",nil)
end
- last.next = nil
+ setfield(last,"next",nil)
free_nodelist(first)
else
if trace_optimize then
report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue")
end
-- free old h->t
- t.next = nil
+ setfield(t,"next",nil)
free_nodelist(h) -- somhow fails
if not encapsulate then
word[2] = first
@@ -697,9 +721,9 @@ variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
end
local function show_quality(current,what,line)
- local set = current.glue_set
- local sign = current.glue_sign
- local order = current.glue_order
+ local set = getfield(current,"glue_set")
+ local sign = getfield(current,"glue_sign")
+ local order = getfield(current,"glue_order")
local amount = set * ((sign == 2 and -1) or 1)
report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order)
end
@@ -719,20 +743,25 @@ function splitters.optimize(head)
math.setrandomseedi(randomseed)
randomseed = nil
end
- local line = 0
- local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
- tex.hbadness, tex.hfuzz = 10000, number.maxdimen
+ local line = 0
+ local tex_hbadness = tex.hbadness
+ local tex_hfuzz = tex.hfuzz
+ tex.hbadness = 10000
+ tex.hfuzz = number.maxdimen
if trace_optimize then
report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc)
end
- for current in traverse_ids(hlist_code,head) do
- -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
+ for current in traverse_ids(hlist_code,tonut(head)) do
line = line + 1
- local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
- if not encapsulate and list.id == glyph_code then
+ local sign = getfield(current,"glue_sign")
+ local dir = getfield(current,"dir")
+ local width = getfield(current,"width")
+ local list = getlist(current)
+ if not encapsulate and getid(list) == glyph_code then
-- nasty .. we always assume a prev being there .. future luatex will always have a leftskip set
- -- current.list, list = insert_node_before(list,list,new_glue(0))
- current.list, list = insert_node_before(list,list,new_leftskip(0))
+ -- is this assignment ok ? .. needs checking
+ list = insert_node_before(list,list,new_leftskip(0)) -- new_glue(0)
+ setfield(current,"list",list)
end
local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
if badness > 0 then
@@ -792,7 +821,7 @@ function splitters.optimize(head)
local words = collect_words(list)
for best=lastbest or 1,max do
local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
- current.list = temp
+ setfield(current,"list",temp)
if trace_optimize then
report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b)
end
@@ -810,15 +839,16 @@ function splitters.optimize(head)
end
end
-- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
- current.list = hpack_nodes(current.list,width,'exactly',listdir)
- -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
+ local list = hpack_nodes(getlist(current),width,'exactly',listdir)
+ setfield(current,"list",list)
end
for i=1,nc do
local ci = cache[i]
free_nodelist(ci.original)
end
cache = { }
- tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
+ tex.hbadness = tex_hbadness
+ tex.hfuzz = tex_hfuzz
stoptiming(splitters)
end
diff --git a/Master/texmf-dist/tex/context/base/font-sty.mkvi b/Master/texmf-dist/tex/context/base/font-sty.mkvi
index 3caa944887c..03fa598c2f2 100644
--- a/Master/texmf-dist/tex/context/base/font-sty.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-sty.mkvi
@@ -172,6 +172,8 @@
%D The new one:
+\setfalse\fontattributeisset
+
\unexpanded\def\dousestyleparameter#value%
{\edef\currentstyleparameter{#value}%
\ifx\currentstyleparameter\empty\else
@@ -342,6 +344,9 @@
\font_styles_define_style_collection_a\s!default
\fi}
+\let\font_styles_define_style_collection_a\relax
+\let\font_styles_define_style_collection_b\relax
+
\unexpanded\def\definestyleinstance
{\doquadrupleargument\font_styles_define_style_instance}
@@ -404,5 +409,4 @@
\let\dostopattributes\endgroup
-
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/font-sym.mkvi b/Master/texmf-dist/tex/context/base/font-sym.mkvi
index e1d5332c479..c8ca49f7470 100644
--- a/Master/texmf-dist/tex/context/base/font-sym.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-sym.mkvi
@@ -52,6 +52,10 @@
\let\v_font_string_d\s!Serif % default fontstyle (will be redefined in type-ini)
+\definefontsynonym
+ [CurrentFont]
+ [\noexpand\v_font_string_a\noexpand\v_font_string_c]
+
% potential generalization:
%
% \letvalue{\??fontfile:t:\s!rm}\s!Serif
diff --git a/Master/texmf-dist/tex/context/base/font-syn.lua b/Master/texmf-dist/tex/context/base/font-syn.lua
index 27176dade05..18ed46a2f88 100644
--- a/Master/texmf-dist/tex/context/base/font-syn.lua
+++ b/Master/texmf-dist/tex/context/base/font-syn.lua
@@ -12,13 +12,14 @@ local next, tonumber, type, tostring = next, tonumber, type, tostring
local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper
local find, gmatch = string.find, string.gmatch
local concat, sort, format = table.concat, table.sort, string.format
-local serialize = table.serialize
+local serialize, sortedhash = table.serialize, table.sortedhash
local lpegmatch = lpeg.match
local unpack = unpack or table.unpack
-local formatters = string.formatters
+local formatters, topattern = string.formatters, string.topattern
local allocate = utilities.storage.allocate
local sparse = utilities.storage.sparse
+local setmetatableindex = table.setmetatableindex
local removesuffix = file.removesuffix
local splitbase = file.splitbase
@@ -34,38 +35,45 @@ local findfile = resolvers.findfile
local cleanpath = resolvers.cleanpath
local resolveresolved = resolvers.resolve
+local settings_to_hash = utilities.parsers.settings_to_hash_tolerant
+
local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end)
local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end)
local trace_specifications = false trackers.register("fonts.specifications", function(v) trace_specifications = v end)
-local report_names = logs.reporter("fonts","names")
+local report_names = logs.reporter("fonts","names")
--[[ldx--
This module implements a name to filename resolver. Names are resolved
using a table that has keys filtered from the font related files.
--ldx]]--
-fonts = fonts or { } -- also used elsewhere
+fonts = fonts or { } -- also used elsewhere
+
+local names = font.names or allocate { }
+fonts.names = names
-local names = font.names or allocate { }
-fonts.names = names
+local filters = names.filters or { }
+names.filters = filters
-local filters = names.filters or { }
-names.filters = filters
+local treatments = fonts.treatments or { }
+fonts.treatments = treatments
-names.data = names.data or allocate { }
+names.data = names.data or allocate { }
-names.version = 1.110
-names.basename = "names"
-names.saved = false
-names.loaded = false
-names.be_clever = true
-names.enabled = true
-names.cache = containers.define("fonts","data",names.version,true)
+names.version = 1.123
+names.basename = "names"
+names.saved = false
+names.loaded = false
+names.be_clever = true
+names.enabled = true
+names.cache = containers.define("fonts","data",names.version,true)
-local autoreload = true
+local usesystemfonts = true
+local autoreload = true
-directives.register("fonts.autoreload", function(v) autoreload = toboolean(v) end)
+directives.register("fonts.autoreload", function(v) autoreload = toboolean(v) end)
+directives.register("fonts.usesystemfonts", function(v) usesystemfonts = toboolean(v) end)
--[[ldx--
A few helpers.
@@ -73,7 +81,33 @@ directives.register("fonts.autoreload", function(v) autoreload = toboolean(v) en
local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs
--- what to do with 'thin'
+-- -- what to do with these -- --
+--
+-- thin -> thin
+--
+-- regu -> regular -> normal
+-- norm -> normal -> normal
+-- stan -> standard -> normal
+-- medi -> medium
+-- ultr -> ultra
+-- ligh -> light
+-- heav -> heavy
+-- blac -> black
+-- thin
+-- book
+-- verylight
+--
+-- buch -> book
+-- buchschrift -> book
+-- halb -> demi
+-- halbfett -> demi
+-- mitt -> medium
+-- mittel -> medium
+-- fett -> bold
+-- mage -> light
+-- mager -> light
+-- nord -> normal
+-- gras -> normal
local weights = Cs ( -- not extra
P("demibold")
@@ -82,6 +116,7 @@ local weights = Cs ( -- not extra
+ P("ultrabold")
+ P("extrabold")
+ P("ultralight")
+ + P("extralight")
+ P("bold")
+ P("demi")
+ P("semi")
@@ -90,10 +125,22 @@ local weights = Cs ( -- not extra
+ P("heavy")
+ P("ultra")
+ P("black")
- + P("bol") -- / "bold"
+--+ P("bol") / "bold" -- blocks
+ + P("bol")
+ P("regular") / "normal"
)
+-- numeric_weights = {
+-- 200 = "extralight",
+-- 300 = "light",
+-- 400 = "book",
+-- 500 = "medium",
+-- 600 = "demi",
+-- 700 = "bold",
+-- 800 = "heavy",
+-- 900 = "black",
+-- }
+
local normalized_weights = sparse {
regular = "normal",
}
@@ -105,8 +152,9 @@ local styles = Cs (
+ P("oblique") / "italic"
+ P("slanted")
+ P("roman") / "normal"
- + P("ital") / "italic"
- + P("ita") / "italic"
+ + P("ital") / "italic" -- might be tricky
+ + P("ita") / "italic" -- might be tricky
+--+ P("obli") / "oblique"
)
local normalized_styles = sparse {
@@ -120,6 +168,7 @@ local widths = Cs(
+ P("thin")
+ P("expanded")
+ P("cond") / "condensed"
+--+ P("expa") / "expanded"
+ P("normal")
+ P("book") / "normal"
)
@@ -178,6 +227,28 @@ names.knownvariants = {
"smallcaps",
}
+local remappedweights = {
+ [""] = "normal",
+ ["bol"] = "bold",
+}
+
+local remappedstyles = {
+ [""] = "normal",
+}
+
+local remappedwidths = {
+ [""] = "normal",
+}
+
+local remappedvariants = {
+ [""] = "normal",
+}
+
+names.remappedweights = remappedweights setmetatableindex(remappedweights ,"self")
+names.remappedstyles = remappedstyles setmetatableindex(remappedstyles ,"self")
+names.remappedwidths = remappedwidths setmetatableindex(remappedwidths ,"self")
+names.remappedvariants = remappedvariants setmetatableindex(remappedvariants,"self")
+
local any = P(1)
local analyzed_table
@@ -232,19 +303,82 @@ filters.ttf = fontloader.info
filters.ttc = fontloader.info
filters.dfont = fontloader.info
-function fontloader.fullinfo(...) -- check with taco what we get / could get
+-- We had this as a temporary solution because we needed a bit more info, but in the
+-- meantime it got an interesting side effect: currently luatex delays loading of e.g.
+-- glyphs, so here we first load and then discard, which is a waste. In the past a full
+-- load was done, so discarding did free memory. One of those things that goes unnoticed.
+--
+-- missing: names, units_per_em, design_range_bottom, design_range_top, design_size,
+-- pfminfo, top_side_bearing
+
+-- function fontloader.fullinfo(...) -- check with taco what we get / could get
+-- local ff = fontloader.open(...)
+-- if ff then
+-- local d = ff -- and fontloader.to_table(ff)
+-- d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil
+-- fontloader.close(ff)
+-- return d
+-- else
+-- return nil, "error in loading font"
+-- end
+-- end
+
+-- Phillip suggested this faster variant but it's still a hack as fontloader.info should
+-- return these keys/values (and maybe some more) but at least we close the loader which
+-- might save some memory in the end.
+
+-- function fontloader.fullinfo(name)
+-- local ff = fontloader.open(name)
+-- if ff then
+-- local fields = table.tohash(fontloader.fields(ff),true)
+-- local d = {
+-- names = fields.names and ff.names,
+-- familyname = fields.familyname and ff.familyname,
+-- fullname = fields.fullname and ff.fullname,
+-- fontname = fields.fontname and ff.fontname,
+-- weight = fields.weight and ff.weight,
+-- italicangle = fields.italicangle and ff.italicangle,
+-- units_per_em = fields.units_per_em and ff.units_per_em,
+-- design_range_bottom = fields.design_range_bottom and ff.design_range_bottom,
+-- design_range_top = fields.design_range_top and ff.design_range_top,
+-- design_size = fields.design_size and ff.design_size,
+-- italicangle = fields.italicangle and ff.italicangle,
+-- pfminfo = fields.pfminfo and ff.pfminfo,
+-- top_side_bearing = fields.top_side_bearing and ff.top_side_bearing,
+-- }
+-- setmetatableindex(d,function(t,k)
+-- report_names("warning, trying to access field %a in font table of %a",k,name)
+-- end)
+-- fontloader.close(ff)
+-- return d
+-- else
+-- return nil, "error in loading font"
+-- end
+-- end
+
+-- As we have lazy loading anyway, this one still is full and with less code than
+-- the previous one. But this depends on the garbage collector to kick in.
+
+function fontloader.fullinfo(...)
local ff = fontloader.open(...)
if ff then
- local d = ff and fontloader.to_table(ff)
- d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil
- fontloader.close(ff)
+ local d = { } -- ff is userdata so [1] or # fails on it
+ setmetatableindex(d,ff)
return d
else
return nil, "error in loading font"
end
end
+-- We don't get the design_* values here as for that the fontloader has to load feature
+-- info and therefore we're not much better off than using 'open'.
+--
+-- if tonumber(status.luatex_version) > 78 or (tonumber(status.luatex_version) == 78 and tonumber(status.luatex_revision) > 0) then
+-- fontloader.fullinfo = fontloader.info
+-- end
+
filters.otf = fontloader.fullinfo
+filters.ttf = fontloader.fullinfo
function filters.afm(name)
-- we could parse the afm file as well, and then report an error but
@@ -257,12 +391,12 @@ function filters.afm(name)
local f = io.open(name)
if f then
local hash = { }
- for line in f:lines() do
+ for line in f:lines() do -- slow
local key, value = match(line,"^(.+)%s+(.+)%s*$")
if key and #key > 0 then
hash[lower(key)] = value
end
- if find(line,"StartCharMetrics") then
+ if find(line,"StartCharMetrics",1,true) then
break
end
end
@@ -420,15 +554,17 @@ local function check_name(data,result,filename,modification,suffix,subfont)
-- prepare
local names = check_names(result)
-- fetch
- local familyname = names and names.preffamilyname or result.familyname
- local fullname = names and names.fullname or result.fullname
- local fontname = result.fontname
- local subfamily = names and names.subfamily
- local modifiers = names and names.prefmodifiers
- local weight = names and names.weight or result.weight
- local italicangle = tonumber(result.italicangle)
- local subfont = subfont or nil
- local rawname = fullname or fontname or familyname
+ local familyname = names and names.preffamilyname or result.familyname
+ local fullname = names and names.fullname or result.fullname
+ local fontname = result.fontname
+ local subfamily = names and names.subfamily
+ local modifiers = names and names.prefmodifiers
+ local weight = names and names.weight or result.weight
+ local italicangle = tonumber(result.italicangle)
+ local subfont = subfont or nil
+ local rawname = fullname or fontname or familyname
+ local filebase = removesuffix(basename(filename))
+ local cleanfilename = cleanname(filebase) -- for WS
-- normalize
familyname = familyname and cleanname(familyname)
fullname = fullname and cleanname(fullname)
@@ -458,28 +594,46 @@ local function check_name(data,result,filename,modification,suffix,subfont)
if not familyname then
familyname = a_name
end
- fontname = fontname or fullname or familyname or basename(filename)
+ fontname = fontname or fullname or familyname or filebase -- maybe cleanfilename
fullname = fullname or fontname
familyname = familyname or fontname
+ -- we do these sparse
+ local units = result.units_per_em or 1000 -- can be zero too
+ local minsize = result.design_range_bottom or 0
+ local maxsize = result.design_range_top or 0
+ local designsize = result.design_size or 0
+ local angle = result.italicangle or 0
+ local pfminfo = result.pfminfo
+ local pfmwidth = pfminfo and pfminfo.width or 0
+ local pfmweight = pfminfo and pfminfo.weight or 0
+ --
specifications[#specifications + 1] = {
- filename = filename, -- unresolved
- format = lower(suffix),
- subfont = subfont,
- rawname = rawname,
- familyname = familyname,
- fullname = fullname,
- fontname = fontname,
- subfamily = subfamily,
- modifiers = modifiers,
- weight = weight,
- style = style,
- width = width,
- variant = variant,
- minsize = result.design_range_bottom or 0,
- maxsize = result.design_range_top or 0,
- designsize = result.design_size or 0,
- modification = modification or 0,
+ filename = filename, -- unresolved
+ cleanfilename = cleanfilename,
+ format = lower(suffix),
+ subfont = subfont,
+ rawname = rawname,
+ familyname = familyname,
+ fullname = fullname,
+ fontname = fontname,
+ subfamily = subfamily,
+ modifiers = modifiers,
+ weight = weight,
+ style = style,
+ width = width,
+ variant = variant,
+ units = units ~= 1000 and units or nil,
+ pfmwidth = pfmwidth ~= 0 and pfmwidth or nil,
+ pfmweight = pfmweight ~= 0 and pfmweight or nil,
+ angle = angle ~= 0 and angle or nil,
+ minsize = minsize ~= 0 and minsize or nil,
+ maxsize = maxsize ~= 0 and maxsize or nil,
+ designsize = designsize ~= 0 and designsize or nil,
+ modification = modification ~= 0 and modification or nil,
}
+-- inspect(filename)
+-- inspect(result)
+-- inspect(specifications[#specifications])
end
local function cleanupkeywords()
@@ -502,10 +656,10 @@ local function cleanupkeywords()
local style = b_style or c_style or d_style or e_style or f_style or "normal"
local width = b_width or c_width or d_width or e_width or f_width or "normal"
local variant = b_variant or c_variant or d_variant or e_variant or f_variant or "normal"
- if not weight or weight == "" then weight = "normal" end
- if not style or style == "" then style = "normal" end
- if not width or width == "" then width = "normal" end
- if not variant or variant == "" then variant = "normal" end
+ weight = remappedweights [weight or ""]
+ style = remappedstyles [style or ""]
+ width = remappedwidths [width or ""]
+ variant = remappedvariants[variant or ""]
weights [weight ] = (weights [weight ] or 0) + 1
styles [style ] = (styles [style ] or 0) + 1
widths [width ] = (widths [width ] or 0) + 1
@@ -524,12 +678,22 @@ local function collectstatistics()
local data = names.data
local specifications = data.specifications
if specifications then
- local weights = { }
- local styles = { }
- local widths = { }
- local variants = { }
+ local f_w = formatters["%i"]
+ local f_a = formatters["%0.2f"]
+ -- normal stuff
+ local weights = { }
+ local styles = { }
+ local widths = { }
+ local variants = { }
+ -- weird stuff
+ local angles = { }
+ -- extra stuff
+ local pfmweights = { } setmetatableindex(pfmweights,"table")
+ local pfmwidths = { } setmetatableindex(pfmwidths, "table")
+ -- main loop
for i=1,#specifications do
- local s = specifications[i]
+ local s = specifications[i]
+ -- normal stuff
local weight = s.weight
local style = s.style
local width = s.width
@@ -538,13 +702,64 @@ local function collectstatistics()
if style then styles [style ] = (styles [style ] or 0) + 1 end
if width then widths [width ] = (widths [width ] or 0) + 1 end
if variant then variants[variant] = (variants[variant] or 0) + 1 end
+ -- weird stuff
+ local angle = f_a(tonumber(s.angle) or 0)
+            angles[angle] = (angles[angle] or 0) + 1
+ -- extra stuff
+ local pfmweight = f_w(s.pfmweight or 0)
+ local pfmwidth = f_w(s.pfmwidth or 0)
+ local tweights = pfmweights[pfmweight]
+ local twidths = pfmwidths [pfmwidth]
+ tweights[pfmweight] = (tweights[pfmweight] or 0) + 1
+ twidths[pfmwidth] = (twidths [pfmwidth] or 0) + 1
+ end
+ --
+ local stats = data.statistics
+ stats.weights = weights
+ stats.styles = styles
+ stats.widths = widths
+ stats.variants = variants
+ stats.angles = angles
+ stats.pfmweights = pfmweights
+ stats.pfmwidths = pfmwidths
+ stats.fonts = #specifications
+ --
+ setmetatableindex(pfmweights,nil)
+ setmetatableindex(pfmwidths, nil)
+ --
+ report_names("")
+ report_names("weights")
+ report_names("")
+ report_names(formatters[" %T"](weights))
+ report_names("")
+ report_names("styles")
+ report_names("")
+ report_names(formatters[" %T"](styles))
+ report_names("")
+ report_names("widths")
+ report_names("")
+ report_names(formatters[" %T"](widths))
+ report_names("")
+ report_names("variants")
+ report_names("")
+ report_names(formatters[" %T"](variants))
+ report_names("")
+ report_names("angles")
+ report_names("")
+ report_names(formatters[" %T"](angles))
+ report_names("")
+ report_names("pfmweights")
+ report_names("")
+ for k, v in sortedhash(pfmweights) do
+ report_names(formatters[" %-10s: %T"](k,v))
end
- local stats = data.statistics
- stats.weights = weights
- stats.styles = styles
- stats.widths = widths
- stats.variants = variants
- stats.fonts = #specifications
+ report_names("")
+ report_names("pfmwidths")
+ report_names("")
+ for k, v in sortedhash(pfmwidths) do
+ report_names(formatters[" %-10s: %T"](k,v))
+ end
+ report_names("")
end
end
@@ -608,8 +823,11 @@ local function checkduplicate(where) -- fails on "Romantik" but that's a border
local specifications = data.specifications
local loaded = { }
if specifications and mapping then
- for _, m in next, mapping do
- for k, v in next, m do
+ -- was: for _, m in sortedhash(mapping) do
+ local order = filters.list
+ for i=1,#order do
+ local m = mapping[order[i]]
+ for k, v in sortedhash(m) do
local s = specifications[v]
local hash = formatters["%s-%s-%s-%s-%s"](s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*")
local h = loaded[hash]
@@ -633,7 +851,7 @@ local function checkduplicate(where) -- fails on "Romantik" but that's a border
end
end
local n = 0
- for k, v in table.sortedhash(loaded) do
+ for k, v in sortedhash(loaded) do
local nv = #v
if nv > 1 then
if trace_warnings then
@@ -720,7 +938,7 @@ local function analyzefiles(olddata)
local oldindices = olddata and olddata.indices or { }
local oldspecifications = olddata and olddata.specifications or { }
local oldrejected = olddata and olddata.rejected or { }
- local treatmentdata = fonts.treatments.data
+ local treatmentdata = treatments.data or { } -- when used outside context
local function identify(completename,name,suffix,storedname)
local pathpart, basepart = splitbase(completename)
nofread = nofread + 1
@@ -796,9 +1014,10 @@ local function analyzefiles(olddata)
end
end
if result == nil then
- local result, message = filters[lower(suffix)](completename)
+ local lsuffix = lower(suffix)
+ local result, message = filters[lsuffix](completename)
if result then
- if result[1] then
+ if #result > 0 then
for r=1,#result do
local ok = check_name(data,result[r],storedname,modification,suffix,r-1) -- subfonts start at zero
-- if not ok then
@@ -868,7 +1087,9 @@ local function analyzefiles(olddata)
walk_tree(names.getpaths(trace),suffix,identify)
end
traverse("tree",withtree) -- TEXTREE only
- if texconfig.kpse_init then
+ if not usesystemfonts then
+ report_names("ignoring system fonts")
+ elseif texconfig.kpse_init then
traverse("lsr", withlsr)
else
traverse("system", withsystem)
@@ -954,12 +1175,13 @@ function names.identify(force)
analyzefiles(not force and names.readdata(names.basename))
rejectclashes()
collectfamilies()
- collectstatistics()
+ -- collectstatistics()
cleanupkeywords()
collecthashes()
checkduplicates()
addfilenames()
-- sorthashes() -- will be resorted when saved
+ collectstatistics()
report_names("total scan time %0.3f seconds",os.gettimeofday()-starttime)
end
@@ -1571,46 +1793,131 @@ end
local lastlookups, lastpattern = { }, ""
-function names.lookup(pattern,name,reload) -- todo: find
- if lastpattern ~= pattern then
- names.load(reload)
- local specifications = names.data.specifications
- local families = names.data.families
- local lookups = specifications
- if name then
- lookups = families[name]
- elseif not find(pattern,"=") then
- lookups = families[pattern]
+-- function names.lookup(pattern,name,reload) -- todo: find
+-- if lastpattern ~= pattern then
+-- names.load(reload)
+-- local specifications = names.data.specifications
+-- local families = names.data.families
+-- local lookups = specifications
+-- if name then
+-- lookups = families[name]
+-- elseif not find(pattern,"=",1,true) then
+-- lookups = families[pattern]
+-- end
+-- if trace_names then
+-- report_names("starting with %s lookups for %a",#lookups,pattern)
+-- end
+-- if lookups then
+-- for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do
+-- local t, n = { }, 0
+-- if find(value,"*",1,true) then
+-- value = topattern(value)
+-- for i=1,#lookups do
+-- local s = lookups[i]
+-- if find(s[key],value) then
+-- n = n + 1
+-- t[n] = lookups[i]
+-- end
+-- end
+-- else
+-- for i=1,#lookups do
+-- local s = lookups[i]
+-- if s[key] == value then
+-- n = n + 1
+-- t[n] = lookups[i]
+-- end
+-- end
+-- end
+-- if trace_names then
+-- report_names("%s matches for key %a with value %a",#t,key,value)
+-- end
+-- lookups = t
+-- end
+-- end
+-- lastpattern = pattern
+-- lastlookups = lookups or { }
+-- end
+-- return #lastlookups
+-- end
+
+local function look_them_up(lookups,specification)
+ for key, value in next, specification do
+ local t, n = { }, 0
+ if find(value,"*",1,true) then
+ value = topattern(value)
+ for i=1,#lookups do
+ local s = lookups[i]
+ if find(s[key],value) then
+ n = n + 1
+ t[n] = lookups[i]
+ end
+ end
+ else
+ for i=1,#lookups do
+ local s = lookups[i]
+ if s[key] == value then
+ n = n + 1
+ t[n] = lookups[i]
+ end
+ end
end
if trace_names then
- report_names("starting with %s lookups for %a",#lookups,pattern)
+ report_names("%s matches for key %a with value %a",#t,key,value)
end
+ lookups = t
+ end
+ return lookups
+end
+
+local function first_look(name,reload)
+ names.load(reload)
+ local data = names.data
+ local specifications = data.specifications
+ local families = data.families
+ if name then
+ return families[name]
+ else
+ return specifications
+ end
+end
+
+function names.lookup(pattern,name,reload) -- todo: find
+ names.load(reload)
+ local data = names.data
+ local specifications = data.specifications
+ local families = data.families
+ local lookups = specifications
+ if name then
+ name = cleanname(name)
+ end
+ if type(pattern) == "table" then
+ local familyname = pattern.familyname
+ if familyname then
+ familyname = cleanname(familyname)
+ pattern.familyname = familyname
+ end
+ local lookups = first_look(name or familyname,reload)
if lookups then
- for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do
- local t, n = { }, 0
- if find(value,"*") then
- value = string.topattern(value)
- for i=1,#lookups do
- local s = lookups[i]
- if find(s[key],value) then
- n = n + 1
- t[n] = lookups[i]
- end
- end
- else
- for i=1,#lookups do
- local s = lookups[i]
- if s[key] == value then
- n = n + 1
- t[n] = lookups[i]
- end
- end
- end
- if trace_names then
- report_names("%s matches for key %a with value %a",#t,key,value)
- end
- lookups = t
+ if trace_names then
+ report_names("starting with %s lookups for '%T'",#lookups,pattern)
+ end
+ lookups = look_them_up(lookups,pattern)
+ end
+ lastpattern = false
+ lastlookups = lookups or { }
+ elseif lastpattern ~= pattern then
+ local lookups = first_look(name or (not find(pattern,"=",1,true) and pattern),reload)
+ if lookups then
+ if trace_names then
+ report_names("starting with %s lookups for %a",#lookups,pattern)
end
+ local specification = settings_to_hash(pattern)
+ local familyname = specification.familyname
+ if familyname then
+ familyname = cleanname(familyname)
+ specification.familyname = familyname
+ end
+ lookups = look_them_up(lookups,specification)
end
lastpattern = pattern
lastlookups = lookups or { }
@@ -1722,3 +2029,49 @@ function names.resolvespec(askedname,sub) -- overloads previous definition
report_names("unresolved: %s",askedname)
end
end
+
+-- We could generate typescripts with designsize info from the name database but
+-- it's not worth the trouble as font names remain a mess: for instance how do we
+-- identify a font? Names, families, subfamilies or whatever snippet can contain
+-- a number related to the design size and so we end up with fuzzy logic again. So,
+-- instead it's easier to make a few goody files.
+--
+-- local hash = { }
+--
+-- for i=1,#specifications do
+-- local s = specifications[i]
+-- local min = s.minsize or 0
+-- local max = s.maxsize or 0
+-- if min ~= 0 or max ~= 0 then
+-- -- the usual name mess:
+-- -- antykwa has modifiers so we need to take these into account, otherwise we get weird combinations
+-- -- ebgaramond has modifiers with the size encoded, so we need to strip this in order to recognized similar styles
+-- -- lm has 'slanted' appended in some names, so how to choose that one
+-- --
+-- local modifier = string.gsub(s.modifiers or "normal","%d","")
+-- -- print funny modifier
+-- local instance = string.formatters["%s-%s-%s-%s-%s-%s"](s.familyname,s.width,s.style,s.weight,s.variant,modifier)
+-- local h = hash[instance]
+-- if not h then
+-- h = { }
+-- hash[instance] = h
+-- end
+-- size = string.formatters["%0.1fpt"]((min)/10)
+-- h[size] = s.filename
+-- end
+-- end
+--
+-- local newhash = { }
+--
+-- for k, v in next, hash do
+-- if next(v,next(v)) then
+-- -- local instance = string.match(k,"(.+)%-.+%-.+%-.+$")
+-- local instance = string.match(k,"(.+)%-.+%-.+$")
+-- local instance = string.gsub(instance,"%-normal$","")
+-- if not newhash[instance] then
+-- newhash[instance] = v
+-- end
+-- end
+-- end
+--
+-- inspect(newhash)
diff --git a/Master/texmf-dist/tex/context/base/font-tfm.lua b/Master/texmf-dist/tex/context/base/font-tfm.lua
index 316b947a36b..827d7058696 100644
--- a/Master/texmf-dist/tex/context/base/font-tfm.lua
+++ b/Master/texmf-dist/tex/context/base/font-tfm.lua
@@ -110,7 +110,7 @@ local function read_from_tfm(specification)
constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
if not features.encoding then
local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.*
- if filename and encoding and encodings.known[encoding] then
+ if filename and encoding and encodings.known and encodings.known[encoding] then
features.encoding = encoding
end
end
diff --git a/Master/texmf-dist/tex/context/base/font-tra.mkiv b/Master/texmf-dist/tex/context/base/font-tra.mkiv
index 1877c4904d7..45d8a728059 100644
--- a/Master/texmf-dist/tex/context/base/font-tra.mkiv
+++ b/Master/texmf-dist/tex/context/base/font-tra.mkiv
@@ -186,6 +186,7 @@
\forgetparindent
\forgeteverypar
\tt
+ \lefttoright
\hbox to \hsize \bgroup
\hbox to 6\emwidth{\bf font\hss}%
\vtop \bgroup
diff --git a/Master/texmf-dist/tex/context/base/font-trt.lua b/Master/texmf-dist/tex/context/base/font-trt.lua
index 6fc8028d128..abc92ba5265 100644
--- a/Master/texmf-dist/tex/context/base/font-trt.lua
+++ b/Master/texmf-dist/tex/context/base/font-trt.lua
@@ -6,7 +6,11 @@ if not modules then modules = { } end modules ['font-trt'] = {
license = "see context related readme files"
}
-local rawget, dofile, next = rawget, dofile, next
+local rawget, dofile, next, type = rawget, dofile, next, type
+
+local cleanfilename = fonts.names.cleanfilename
+local splitbase = file.splitbase
+local lower = string.lower
--[[ldx--
We provide a simple treatment mechanism (mostly because I want to demonstrate
@@ -14,11 +18,24 @@ something in a manual). It's one of the few places where an lfg file gets loaded
outside the goodies manager.
--ldx]]--
-local treatments = utilities.storage.allocate()
-fonts.treatments = treatments
-local treatmentdata = { }
-treatments.data = treatmentdata
-treatments.filename = "treatments.lfg"
+local treatments = fonts.treatments or { }
+fonts.treatments = treatments
+
+local treatmentdata = treatments.data or utilities.storage.allocate()
+treatments.data = treatmentdata
+
+treatments.filename = "treatments.lfg"
+
+local trace_treatments = false trackers.register("fonts.treatments", function(v) trace_treatments = v end)
+local report_treatment = logs.reporter("fonts","treatment")
+
+treatments.report = report_treatment
+
+function treatments.trace(...)
+ if trace_treatments then
+ report_treatment(...)
+ end
+end
-- function treatments.load(name)
-- local filename = resolvers.findfile(name)
@@ -55,3 +72,45 @@ table.setmetatableindex(treatmentdata,function(t,k)
table.setmetatableindex(treatmentdata,nil)
return treatmentdata[k]
end)
+
+local function applyfix(fix,filename,data,n)
+ if type(fix) == "function" then
+ -- we assume that when needed the fix reports something
+ -- if trace_treatments then
+ -- report_treatment("applying treatment %a to file %a",n,filename)
+ -- end
+ fix(data)
+ elseif trace_treatments then
+ report_treatment("invalid treatment %a for file %a",n,filename)
+ end
+end
+
+function treatments.applyfixes(filename,data)
+ local filename = cleanfilename(filename)
+ local pathpart, basepart = splitbase(filename)
+ local treatment = treatmentdata[filename] or treatmentdata[basepart]
+ if treatment then
+ local fixes = treatment.fixes
+ if not fixes then
+ -- nothing to fix
+ elseif type(fixes) == "table" then
+ for i=1,#fixes do
+ applyfix(fixes[i],filename,data,i)
+ end
+ else
+ applyfix(fixes,filename,data,1)
+ end
+ end
+end
+
+function treatments.ignoredfile(fullname)
+ local treatmentdata = treatments.data or { } -- when used outside context
+ local _, basepart = splitbase(fullname)
+ local treatment = treatmentdata[basepart] or treatmentdata[lower(basepart)]
+ if treatment and treatment.ignored then
+ report_treatment("font file %a resolved as %a is ignored, reason %a",basepart,fullname,treatment.comment or "unknown")
+ return true
+ end
+end
+
+fonts.names.ignoredfile = treatments.ignoredfile
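
The two entry points added above act on entries in fonts.treatments.data; a minimal sketch of such an entry, with an invented filename and fix body, assuming a ConTeXt Lua run (real entries normally come from treatments.lfg):

local treatments = fonts.treatments

-- hypothetical entry, for illustration only
treatments.data["brokenname.ttf"] = {
    comment = "illustrative entry",
    fixes   = function(data)
        treatments.report("patching loaded table of %a","brokenname.ttf")
    end,
}

An entry that instead sets ignored = true (plus a comment) is what makes treatments.ignoredfile, and therefore fonts.names.ignoredfile, skip that file.
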
diff --git a/Master/texmf-dist/tex/context/base/font-var.mkvi b/Master/texmf-dist/tex/context/base/font-var.mkvi
index e50c2bad458..fb60b711cef 100644
--- a/Master/texmf-dist/tex/context/base/font-var.mkvi
+++ b/Master/texmf-dist/tex/context/base/font-var.mkvi
@@ -50,4 +50,7 @@
\let\fontsize \defaultfontsize
\let\fontface \!!zerocount
+% we can use an indirect mapping for fontclasses (map string onto numbers) and indeed this
+% is somewhat more efficient but also makes the code messy ... maybe some day ...
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/grph-fig.mkiv b/Master/texmf-dist/tex/context/base/grph-fig.mkiv
index 80b8e35d932..9b9333fa9a1 100644
--- a/Master/texmf-dist/tex/context/base/grph-fig.mkiv
+++ b/Master/texmf-dist/tex/context/base/grph-fig.mkiv
@@ -25,12 +25,21 @@
% \appendtoks \setbuffer[typeset-b]\endbuffer\to \everystarttext
% \appendtoks \setbuffer[typeset-a]\endbuffer\to \everystarttext
-\newcount\c_grph_buffers_n
+% we could use \typesetbuffer[*] to access the last one
-\let\m_grph_buffers_filename\empty
+\newconstant\c_grph_buffers_mode
+
+\let\lasttypesetbuffer\empty
\unexpanded\def\typesetbuffer
- {\dodoubleempty\grph_buffers_typeset}
+ {\bgroup
+ \setconstant\c_grph_buffers_mode\plusone
+ \dodoubleempty\grph_buffers_typeset}
+
+\unexpanded\def\typesetbufferonly
+ {\bgroup
+ \setconstant\c_grph_buffers_mode\zerocount
+ \dodoubleempty\grph_buffers_typeset}
\def\grph_buffers_typeset[#1][#2]% beware: this will mix up the mp graphics
{\ifsecondargument
@@ -44,11 +53,12 @@
\fi\fi}
\def\grph_buffers_typeset_indeed[#1][#2]% we could use the via files
- {\bgroup
- \global\advance\c_grph_buffers_n\plusone
- \edef\m_grph_buffers_filename{\jobname-buffer-\the\c_grph_buffers_n}%
- \ctxcommand{runbuffer("\m_grph_buffers_filename.tmp","#1",true)}%
- \externalfigure[\m_grph_buffers_filename.pdf][#2]%
+ {\doifnot{#1}{*}{\xdef\lasttypesetbuffer{\ctxcommand{runbuffer("#1",true)}}}%
+ \ifcase\c_grph_buffers_mode
+ % typesetonly
+ \or
+ \externalfigure[\lasttypesetbuffer][#2]%
+ \fi
\egroup}
% For manuals and such:
diff --git a/Master/texmf-dist/tex/context/base/grph-inc.lua b/Master/texmf-dist/tex/context/base/grph-inc.lua
index 9603419ae2e..392aa58b12e 100644
--- a/Master/texmf-dist/tex/context/base/grph-inc.lua
+++ b/Master/texmf-dist/tex/context/base/grph-inc.lua
@@ -38,7 +38,6 @@ run TeX code from within Lua. Some more functionality will move to Lua.
]]--
local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch
-local texbox = tex.box
local contains = table.contains
local concat, insert, remove = table.concat, table.insert, table.remove
local todimen = string.todimen
@@ -55,6 +54,15 @@ local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
local replacetemplate = utilities.templates.replace
+local images = img
+
+local texgetbox = tex.getbox
+local texsetbox = tex.setbox
+
+local hpack = node.hpack
+
+local context = context
+
local variables = interfaces.variables
local codeinjections = backends.codeinjections
local nodeinjections = backends.nodeinjections
@@ -67,8 +75,6 @@ local trace_inclusion = false trackers.register("graphics.inclusion", functi
local report_inclusion = logs.reporter("graphics","inclusion")
-local context, img = context, img
-
local f_hash_part = formatters["%s->%s->%s"]
local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"]
@@ -82,7 +88,7 @@ local v_default = variables.default
local maxdimen = 2^30-1
-function img.check(figure)
+function images.check(figure)
if figure then
local width = figure.width
local height = figure.height
@@ -103,36 +109,38 @@ end
--- some extra img functions --- can become luat-img.lua
-local imgkeys = img.keys()
+local allimagekeys = images.keys()
-function img.totable(imgtable)
+local function imagetotable(imgtable)
local result = { }
- for k=1,#imgkeys do
- local key = imgkeys[k]
+ for k=1,#allimagekeys do
+ local key = allimagekeys[k]
result[key] = imgtable[key]
end
return result
end
-function img.serialize(i,...)
- return table.serialize(img.totable(i),...)
+images.totable = imagetotable
+
+function images.serialize(i,...)
+ return table.serialize(imagetotable(i),...)
end
-function img.print(i,...)
- return table.print(img.totable(i),...)
+function images.print(i,...)
+ return table.print(imagetotable(i),...)
end
-function img.clone(i,data)
+function images.clone(i,data)
i.width = data.width or i.width
i.height = data.height or i.height
-- attr etc
return i
end
-local validsizes = table.tohash(img.boxes())
-local validtypes = table.tohash(img.types())
+local validsizes = table.tohash(images.boxes())
+local validtypes = table.tohash(images.types())
-function img.checksize(size)
+function images.checksize(size)
if size then
size = gsub(size,"box","")
return validsizes[size] and size or "crop"
@@ -143,7 +151,7 @@ end
local indexed = { }
-function img.ofindex(n)
+function images.ofindex(n)
return indexed[n]
end
@@ -430,7 +438,7 @@ function figures.initialize(request)
request.height = h > 0 and h or nil
--
request.page = math.max(tonumber(request.page) or 1,1)
- request.size = img.checksize(request.size)
+ request.size = images.checksize(request.size)
request.object = request.object == v_yes
request["repeat"] = request["repeat"] == v_yes
request.preview = request.preview == v_yes
@@ -734,6 +742,9 @@ local function locate(request) -- name, format, cache
local pattern = figures_patterns[i]
if find(askedformat,pattern[1]) then
format = pattern[2]
+ if trace_figures then
+ report_inclusion("asked format %a matches %a",askedformat,pattern[1])
+ end
break
end
end
@@ -753,6 +764,7 @@ local function locate(request) -- name, format, cache
elseif quitscanning then
return register(askedname)
end
+ askedformat = format -- new per 2013-08-05
elseif trace_figures then
report_inclusion("unknown format %a",askedformat)
end
@@ -971,7 +983,7 @@ function figures.done(data)
figures.nofprocessed = figures.nofprocessed + 1
data = data or callstack[#callstack] or lastfiguredata
local dr, du, ds, nr = data.request, data.used, data.status, figures.boxnumber
- local box = texbox[nr]
+ local box = texgetbox(nr)
ds.width = box.width
ds.height = box.height
ds.xscale = ds.width /(du.width or 1)
@@ -983,7 +995,7 @@ end
function figures.dummy(data)
data = data or callstack[#callstack] or lastfiguredata
local dr, du, nr = data.request, data.used, figures.boxnumber
- local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet)
+ local box = hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet)
du.width = du.width or figures.defaultwidth
du.height = du.height or figures.defaultheight
du.depth = du.depth or figures.defaultdepth
@@ -991,7 +1003,7 @@ function figures.dummy(data)
box.width = du.width
box.height = du.height
box.depth = du.depth
- texbox[nr] = box -- hm, should be global (to be checked for consistency)
+ texsetbox(nr,box) -- hm, should be global (to be checked for consistency)
end
-- -- -- generic -- -- --
@@ -1033,7 +1045,7 @@ function checkers.generic(data)
local hash = f_hash_full(name,page,size,color,conversion,resolution,mask)
local figure = figures_loaded[hash]
if figure == nil then
- figure = img.new {
+ figure = images.new {
filename = name,
page = page,
pagebox = dr.size,
@@ -1041,7 +1053,7 @@ function checkers.generic(data)
}
codeinjections.setfigurecolorspace(data,figure)
codeinjections.setfiguremask(data,figure)
- figure = figure and img.check(img.scan(figure)) or false
+ figure = figure and images.check(images.scan(figure)) or false
local f, d = codeinjections.setfigurealternative(data,figure)
figure, data = f or figure, d or data
figures_loaded[hash] = figure
@@ -1084,18 +1096,18 @@ function includers.generic(data)
if figure == nil then
figure = ds.private
if figure then
- figure = img.copy(figure)
- figure = figure and img.clone(figure,data.request) or false
+ figure = images.copy(figure)
+ figure = figure and images.clone(figure,data.request) or false
end
figures_used[hash] = figure
end
if figure then
local nr = figures.boxnumber
-- it looks like we have a leak in attributes here .. todo
- local box = node.hpack(img.node(figure)) -- img.node(figure) not longer valid
+ local box = hpack(images.node(figure)) -- images.node(figure) no longer valid
indexed[figure.index] = figure
box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet)
- texbox[nr] = box
+ texsetbox(nr,box)
ds.objectnumber = figure.objnum
context.relocateexternalfigure()
end
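
The renamed helpers all live on the img library itself (images is just an alias for img), so a quick sketch of inspecting a figure, with a made-up filename, looks like this:

local images = img
local figure = images.new { filename = "cow.pdf", page = 1 }
figure = images.check(images.scan(figure))
if figure then
    print(images.serialize(figure))    -- plain table with width, height, pages, ...
end
print(images.checksize("mediabox"))    -- "media"; unknown sizes fall back to "crop"
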
diff --git a/Master/texmf-dist/tex/context/base/grph-inc.mkiv b/Master/texmf-dist/tex/context/base/grph-inc.mkiv
index 8557bbb0b85..e8b63cc4bb9 100644
--- a/Master/texmf-dist/tex/context/base/grph-inc.mkiv
+++ b/Master/texmf-dist/tex/context/base/grph-inc.mkiv
@@ -297,11 +297,12 @@
%
\edef\p_width {\externalfigureparameter\c!width}%
\edef\p_height{\externalfigureparameter\c!height}%
+ \edef\p_label {\externalfigureparameter\c!label}%
%
\dostarttagged\t!image\empty
\ctxlua{figures.push {
name = "\p_grph_include_name",
- label = "\p_grph_include_label",
+ label = "\ifx\p_label\empty\p_grph_include_label\else\p_label\fi",
page = "\externalfigureparameter\c!page",
size = "\externalfigureparameter\c!size",
object = "\externalfigureparameter\c!object",
@@ -535,17 +536,13 @@
\c!offset=\v!overlay,\c!width=\v!fit,\c!height=\v!fit]
{\blank[\v!disable]#1\endgraf\removelastskip}} % disable should stay here!
-\def\grph_include_process_mps#1% retrofit into mkii
- {\global\setbox\foundexternalfigure\vbox{\convertMPtoPDF{#1}11}}
+% used at lua end:
-\def\grph_include_process_cld#1%
- {\global\setbox\foundexternalfigure\vbox{\cldprocessfile{#1}}}
-
-\unexpanded\def\docheckfigurebuffer #1{\grph_include_process_tex{\getbuffer[#1]}} % used al lua end
-\unexpanded\def\docheckfiguretex #1{\grph_include_process_tex{\input{#1}}} % used al lua end
-\unexpanded\def\docheckfigurecld #1{\grph_include_process_cld{#1}} % used al lua end
-\unexpanded\def\docheckfiguremps #1{\grph_include_process_mps{#1}} % used al lua end
-\unexpanded\def\docheckfiguremprun #1#2{\grph_include_process_tex{\useMPrun{#1}{#2}}} % used al lua end
+\unexpanded\def\docheckfigurebuffer #1{\grph_include_process_tex{\getbuffer[#1]}}
+\unexpanded\def\docheckfiguretex #1{\grph_include_process_tex{\input{#1}}}
+\unexpanded\def\docheckfigurecld #1{\global\setbox\foundexternalfigure\vbox{\cldprocessfile{#1}}}
+\unexpanded\def\docheckfiguremps #1{\global\setbox\foundexternalfigure\vbox{\convertMPtoPDF{#1}11}}
+\unexpanded\def\docheckfiguremprun #1#2{\global\setbox\foundexternalfigure\vbox{\useMPrun{#1}{#2}}}
\unexpanded\def\relocateexternalfigure % easier here than in lua
{\global\setbox\foundexternalfigure\vbox to \ht\foundexternalfigure\bgroup
@@ -610,7 +607,9 @@
\letexternalfigureparameter\c!offset\v!overlay
\letexternalfigureparameter\c!width \figurewidth
\letexternalfigureparameter\c!height\figureheight
- \inheritedexternalfigureframed{\vfilll\box\foundexternalfigure}%
+% \letexternalfigureparameter\c!align \v!middle
+% \letexternalfigureparameter\c!autowidth\v!no
+ \inheritedexternalfigureframed{\box\foundexternalfigure}%
\fi
\fi\fi
\fi
diff --git a/Master/texmf-dist/tex/context/base/java-ini.lua b/Master/texmf-dist/tex/context/base/java-ini.lua
index 321e4e24d22..67337949465 100644
--- a/Master/texmf-dist/tex/context/base/java-ini.lua
+++ b/Master/texmf-dist/tex/context/base/java-ini.lua
@@ -6,30 +6,36 @@ if not modules then modules = { } end modules ['java-ini'] = {
license = "see context related readme files"
}
+-- todo: don't flush scripts if no JS key
+
local format = string.format
local concat = table.concat
local lpegmatch, P, S, C, Carg, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Carg, lpeg.Cc
-local allocate = utilities.storage.allocate
-local settings_to_array = utilities.parsers.settings_to_array
-local variables = interfaces.variables
-local formatters = string.formatters
+local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
--- todo: don't flush scripts if no JS key
+local variables = interfaces.variables
+local formatters = string.formatters
+
+local context = context
+local commands = commands
-local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end)
+local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end)
local report_javascripts = logs.reporter ("interactions","javascripts")
local status_javascripts = logs.messenger("interactions","javascripts")
-interactions.javascripts = interactions.javascripts or { }
-local javascripts = interactions.javascripts
+local javascripts = interactions.javascripts or { }
+interactions.javascripts = javascripts
-javascripts.codes = allocate()
-javascripts.preambles = allocate()
-javascripts.functions = allocate()
+local codes = allocate()
+local preambles = allocate()
+local functions = allocate()
-local codes, preambles, functions = javascripts.codes, javascripts.preambles, javascripts.functions
+javascripts.codes = codes
+javascripts.preambles = preambles
+javascripts.functions = functions
local preambled = { }
diff --git a/Master/texmf-dist/tex/context/base/l-boolean.lua b/Master/texmf-dist/tex/context/base/l-boolean.lua
index f087f1a4ce2..8d11080e7fc 100644
--- a/Master/texmf-dist/tex/context/base/l-boolean.lua
+++ b/Master/texmf-dist/tex/context/base/l-boolean.lua
@@ -59,9 +59,9 @@ end
function string.is_boolean(str,default)
if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
+ if str == "true" or str == "yes" or str == "on" or str == "t" or str == "1" then
return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
+ elseif str == "false" or str == "no" or str == "off" or str == "f" or str == "0" then
return false
end
end
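
A quick check of the extended matcher; the fallback for unrecognized strings sits outside this hunk and is assumed to return the given default:

print(string.is_boolean("1"))           -- true
print(string.is_boolean("0"))           -- false
print(string.is_boolean("off"))         -- false
print(string.is_boolean("maybe",true))  -- true (the assumed default fallback)
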
diff --git a/Master/texmf-dist/tex/context/base/l-dir.lua b/Master/texmf-dist/tex/context/base/l-dir.lua
index 3d0576eeb58..25721206046 100644
--- a/Master/texmf-dist/tex/context/base/l-dir.lua
+++ b/Master/texmf-dist/tex/context/base/l-dir.lua
@@ -25,6 +25,9 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
local chdir = lfs.chdir
+local mkdir = lfs.mkdir
+
+local onwindows = os.type == "windows" or find(os.getenv("PATH"),";",1,true)
-- in case we load outside luatex
@@ -136,11 +139,33 @@ end
dir.collectpattern = collectpattern
-local pattern = Ct {
- [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
- [2] = C(((1-S("*?/"))^0 * P("/"))^0),
- [3] = C(P(1)^0)
-}
+local separator
+
+if onwindows then -- we could sanitize here
+
+-- pattern = Ct {
+-- [1] = (C(P(".") + S("/\\")^1) + C(R("az","AZ") * P(":") * S("/\\")^0) + Cc("./")) * V(2) * V(3),
+-- [2] = C(((1-S("*?/\\"))^0 * S("/\\"))^0),
+-- [3] = C(P(1)^0)
+-- }
+
+ local slash = S("/\\") / "/"
+
+ pattern = Ct {
+ [1] = (Cs(P(".") + slash^1) + Cs(R("az","AZ") * P(":") * slash^0) + Cc("./")) * V(2) * V(3),
+ [2] = Cs(((1-S("*?/\\"))^0 * slash)^0),
+ [3] = Cs(P(1)^0)
+ }
+
+else -- assume unix
+
+ pattern = Ct {
+ [1] = (C(P(".") + P("/")^1) + Cc("./")) * V(2) * V(3),
+ [2] = C(((1-S("*?/"))^0 * P("/"))^0),
+ [3] = C(P(1)^0)
+ }
+
+end
local filter = Cs ( (
P("**") / ".*" +
@@ -164,7 +189,7 @@ local function glob(str,t)
local split = lpegmatch(pattern,str) -- we could use the file splitter
if split then
local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
+ local recurse = find(base,"**",1,true) -- find(base,"%*%*")
local start = root .. path
local result = lpegmatch(filter,start .. base)
globpattern(start,result,recurse,t)
@@ -190,7 +215,7 @@ local function glob(str,t)
local t = t or { }
local action = action or function(name) t[#t+1] = name end
local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
+ local recurse = find(base,"**",1,true) -- find(base,"%*%*")
local start = root .. path
local result = lpegmatch(filter,start .. base)
globpattern(start,result,recurse,action)
@@ -257,25 +282,32 @@ end
local make_indeed = true -- false
-local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
-
if onwindows then
function dir.mkdirs(...)
- local str, pth = "", ""
- for i=1,select("#",...) do
- local s = select(i,...)
- if s == "" then
- -- skip
- elseif str == "" then
- str = s
- else
- str = str .. "/" .. s
+ local n = select("#",...)
+ local str
+ if n == 1 then
+ str = select(1,...)
+ if isdir(str) then
+ return str, true
+ end
+ else
+ str = ""
+ for i=1,n do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
+ end
end
end
- local first, middle, last
+ local pth = ""
local drive = false
- first, middle, last = match(str,"^(//)(//*)(.*)$")
+ local first, middle, last = match(str,"^(//)(//*)(.*)$")
if first then
-- empty network path == local path
else
@@ -309,7 +341,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -330,14 +362,23 @@ if onwindows then
else
function dir.mkdirs(...)
- local str, pth = "", ""
- for i=1,select("#",...) do
- local s = select(i,...)
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
+ local n = select("#",...)
+ local str, pth
+ if n == 1 then
+ str = select(1,...)
+ if isdir(str) then
+ return str, true
+ end
+ else
+ str = ""
+ for i=1,n do
+ local s = select(i,...)
+ if s and s ~= "" then -- we catch nil and false
+ if str ~= "" then
+ str = str .. "/" .. s
+ else
+ str = s
+ end
end
end
end
@@ -352,7 +393,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
else
@@ -360,7 +401,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
end
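
A sketch of the effect of the two changes above, with made-up paths: the windows glob splitter now accepts (and normalizes) backslashes, and mkdirs short-circuits a single argument that is already a directory:

local list     = dir.glob([[c:\data\**\*.tex]])  -- treated like "c:/data/**/*.tex"
local path, ok = dir.mkdirs("e:/tmp/a/b/c")      -- ok is true once the full path exists
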
diff --git a/Master/texmf-dist/tex/context/base/l-file.lua b/Master/texmf-dist/tex/context/base/l-file.lua
index a64ee86564c..ebb2b39f4a2 100644
--- a/Master/texmf-dist/tex/context/base/l-file.lua
+++ b/Master/texmf-dist/tex/context/base/l-file.lua
@@ -368,11 +368,14 @@ function file.joinpath(tab,separator) -- table
return tab and concat(tab,separator or io.pathseparator) -- can have trailing //
end
+local someslash = S("\\/")
local stripper = Cs(P(fwslash)^0/"" * reslasher)
-local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon
+local isnetwork = someslash * someslash * (1-someslash)
+ + (1-fwslash-colon)^1 * colon
local isroot = fwslash^1 * -1
local hasroot = fwslash^1
+local reslasher = lpeg.replacer(S("\\/"),"/")
local deslasher = lpeg.replacer(S("\\/")^1,"/")
-- If we have a network or prefix then there is a chance that we end up with two
@@ -386,8 +389,13 @@ function file.join(...)
local lst = { ... }
local one = lst[1]
if lpegmatch(isnetwork,one) then
+ local one = lpegmatch(reslasher,one)
local two = lpegmatch(deslasher,concat(lst,"/",2))
- return one .. "/" .. two
+ if lpegmatch(hasroot,two) then
+ return one .. two
+ else
+ return one .. "/" .. two
+ end
elseif lpegmatch(isroot,one) then
local two = lpegmatch(deslasher,concat(lst,"/",2))
if lpegmatch(hasroot,two) then
@@ -412,6 +420,8 @@ end
-- print(file.join("http://a","/y"))
-- print(file.join("http:///a","/y"))
-- print(file.join("//nas-1","/y"))
+-- print(file.join("//nas-1/a/b/c","/y"))
+-- print(file.join("\\\\nas-1\\a\\b\\c","\\y"))
-- The previous one fails on "a.b/c" so Taco came up with a split based
-- variant. After some skyping we got it sort of compatible with the old
@@ -421,9 +431,14 @@ end
-- finds were replaced by lpegs.
local drivespec = R("az","AZ")^1 * colon
-local anchors = fwslash + drivespec
-local untouched = periods + (1-period)^1 * P(-1)
-local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1))
+local anchors = fwslash
+ + drivespec
+local untouched = periods
+ + (1-period)^1 * P(-1)
+local mswindrive = Cs(drivespec * (bwslash/"/" + fwslash)^0)
+local mswinuncpath = (bwslash + fwslash) * (bwslash + fwslash) * Cc("//")
+local splitstarter = (mswindrive + mswinuncpath + Cc(false))
+ * Ct(lpeg.splitat(S("/\\")^1))
local absolute = fwslash
function file.collapsepath(str,anchor) -- anchor: false|nil, true, "."
@@ -490,6 +505,7 @@ end
-- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
-- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
-- test("./a")
+-- test([[\\a.b.c\d\e]])
local validchars = R("az","09","AZ","--","..")
local pattern_a = lpeg.replacer(1-validchars)
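
Expected outcomes of the commented join tests above, given the patterns defined in this hunk (a rooted second argument is no longer doubled after a network prefix):

print(file.join("//nas-1/a/b/c","/y"))      -- //nas-1/a/b/c/y
print(file.join([[\\nas-1\a\b\c]],[[\y]]))  -- //nas-1/a/b/c/y
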
diff --git a/Master/texmf-dist/tex/context/base/l-io.lua b/Master/texmf-dist/tex/context/base/l-io.lua
index 06e1fb5efc8..020e811bf09 100644
--- a/Master/texmf-dist/tex/context/base/l-io.lua
+++ b/Master/texmf-dist/tex/context/base/l-io.lua
@@ -12,7 +12,7 @@ local concat = table.concat
local floor = math.floor
local type = type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator, io.pathseparator = "\\", ";"
else
io.fileseparator, io.pathseparator = "/" , ":"
@@ -35,6 +35,7 @@ local function readall(f)
return f:read('*all')
else
local done = f:seek("set",0)
+ local step
if size < 1024*1024 then
step = 1024 * 1024
elseif size > 16*1024*1024 then
@@ -59,7 +60,7 @@ io.readall = readall
function io.loaddata(filename,textmode) -- return nil if empty
local f = io.open(filename,(textmode and 'r') or 'rb')
if f then
--- local data = f:read('*all')
+ -- local data = f:read('*all')
local data = readall(f)
f:close()
if #data > 0 then
diff --git a/Master/texmf-dist/tex/context/base/l-lpeg.lua b/Master/texmf-dist/tex/context/base/l-lpeg.lua
index 323c73b6967..6feb7089c74 100644
--- a/Master/texmf-dist/tex/context/base/l-lpeg.lua
+++ b/Master/texmf-dist/tex/context/base/l-lpeg.lua
@@ -6,6 +6,10 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
+-- lpeg 12 vs lpeg 10: slower compilation, similar parsing speed (i need to check
+-- if i can use new features like capture / 2 and .B; at first sight the xml
+-- parser is some 5% slower)
+
-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-- move utf -> l-unicode
@@ -13,6 +17,20 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
lpeg = require("lpeg")
+-- The latest lpeg doesn't have print any more, and even the new ones are not
+-- available by default (only when debug mode is enabled), which is a pity as
+-- it helps nailing down bottlenecks. Performance seems comparable: some 10%
+-- slower pattern compilation, same parsing speed, although,
+--
+-- local p = lpeg.C(lpeg.P(1)^0 * lpeg.P(-1))
+-- local a = string.rep("123",100)
+-- lpeg.match(p,a)
+--
+-- seems slower and is also still suboptimal (i.e. a match that runs from begin
+-- to end, one of the cases where string matchers win).
+
+if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
+
-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-- some code will move to unicode and string
@@ -61,7 +79,9 @@ local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
-- let's start with an inspector:
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -69,7 +89,6 @@ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
lpeg.patterns = lpeg.patterns or { } -- so that we can share
local patterns = lpeg.patterns
-
local anything = P(1)
local endofstring = P(-1)
local alwaysmatched = P(true)
@@ -79,37 +98,59 @@ patterns.endofstring = endofstring
patterns.beginofstring = alwaysmatched
patterns.alwaysmatched = alwaysmatched
-local digit, sign = R('09'), S('+-')
+local sign = S('+-')
+local zero = P('0')
+local digit = R('09')
+local octdigit = R("07")
+local lowercase = R("az")
+local uppercase = R("AZ")
+local underscore = P("_")
+local hexdigit = digit + lowercase + uppercase
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
local newline = crlf + S("\r\n") -- cr + lf
local escaped = P("\\") * anything
local squote = P("'")
local dquote = P('"')
local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
+local period = P(".")
+local comma = P(",")
+
+local utfbom_32_be = P('\000\000\254\255') -- 00 00 FE FF
+local utfbom_32_le = P('\255\254\000\000') -- FF FE 00 00
+local utfbom_16_be = P('\254\255') -- FE FF
+local utfbom_16_le = P('\255\254') -- FF FE
+local utfbom_8 = P('\239\187\191') -- EF BB BF
local utfbom = utfbom_32_be + utfbom_32_le
+ utfbom_16_be + utfbom_16_le
+ utfbom_8
local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
+ utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
+ utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8
+local utfstricttype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
+ + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
+ + utfbom_8 * Cc("utf-8")
local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4)
+ utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2)
+ utfbom_8 * Cc(3) + Cc(0)
local utf8next = R("\128\191")
+patterns.utfbom_32_be = utfbom_32_be
+patterns.utfbom_32_le = utfbom_32_le
+patterns.utfbom_16_be = utfbom_16_be
+patterns.utfbom_16_le = utfbom_16_le
+patterns.utfbom_8 = utfbom_8
+
+patterns.utf_16_be_nl = P("\000\r\000\n") + P("\000\r") + P("\000\n")
+patterns.utf_16_le_nl = P("\r\000\n\000") + P("\r\000") + P("\n\000")
+
patterns.utf8one = R("\000\127")
patterns.utf8two = R("\194\223") * utf8next
patterns.utf8three = R("\224\239") * utf8next * utf8next
patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
patterns.utfbom = utfbom
patterns.utftype = utftype
+patterns.utfstricttype = utfstricttype
patterns.utfoffset = utfoffset
local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
@@ -135,31 +176,18 @@ patterns.whitespace = whitespace
patterns.nonspacer = nonspacer
patterns.nonwhitespace = nonwhitespace
-local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
+local stripper = spacer ^0 * C((spacer ^0 * nonspacer ^1)^0) -- from example by roberto
+local fullstripper = whitespace^0 * C((whitespace^0 * nonwhitespace^1)^0)
------ collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0)
+----- collapser = Cs(spacer^0/"" * ((spacer^1 * endofstring / "") + (spacer^1/" ") + P(1))^0)
local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
patterns.stripper = stripper
+patterns.fullstripper = fullstripper
patterns.collapser = collapser
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.unsigned = digit^0 * P('.') * digit^1
-patterns.float = sign^0 * patterns.unsigned
-patterns.cunsigned = digit^0 * P(',') * digit^1
-patterns.cfloat = sign^0 * patterns.cunsigned
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
+patterns.lowercase = lowercase
+patterns.uppercase = uppercase
patterns.letter = patterns.lowercase + patterns.uppercase
patterns.space = space
patterns.tab = P("\t")
@@ -167,12 +195,12 @@ patterns.spaceortab = patterns.space + patterns.tab
patterns.newline = newline
patterns.emptyline = newline^1
patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * spacer^0
-patterns.period = P(".")
+patterns.comma = comma
+patterns.commaspacer = comma * spacer^0
+patterns.period = period
patterns.colon = P(":")
patterns.semicolon = P(";")
-patterns.underscore = P("_")
+patterns.underscore = underscore
patterns.escaped = escaped
patterns.squote = squote
patterns.dquote = dquote
@@ -187,12 +215,38 @@ patterns.singlequoted = squote * patterns.nosquote * squote
patterns.doublequoted = dquote * patterns.nodquote * dquote
patterns.quoted = patterns.doublequoted + patterns.singlequoted
-patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1)
+patterns.digit = digit
+patterns.octdigit = octdigit
+patterns.hexdigit = hexdigit
+patterns.sign = sign
+patterns.cardinal = digit^1
+patterns.integer = sign^-1 * digit^1
+patterns.unsigned = digit^0 * period * digit^1
+patterns.float = sign^-1 * patterns.unsigned
+patterns.cunsigned = digit^0 * comma * digit^1
+patterns.cfloat = sign^-1 * patterns.cunsigned
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = zero * octdigit^1
+patterns.octal = patterns.oct
+patterns.HEX = zero * P("X") * (digit+uppercase)^1
+patterns.hex = zero * P("x") * (digit+lowercase)^1
+patterns.hexadecimal = zero * S("xX") * hexdigit^1
+
+patterns.hexafloat = sign^-1
+ * zero * S("xX")
+ * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1)
+ * (S("pP") * sign^-1 * hexdigit^1)^-1
+patterns.decafloat = sign^-1
+ * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1)
+ * S("eE") * sign^-1 * digit^1
+
+patterns.propername = (uppercase + lowercase + underscore) * (uppercase + lowercase + underscore + digit)^0 * endofstring
patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
patterns.beginline = #(1-newline)
-patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0))
+patterns.longtostring = Cs(whitespace^0/"" * ((patterns.quoted + nonwhitespace^1 + whitespace^1/"" * (P(-1) + Cc(" ")))^0))
local function anywhere(pattern) --slightly adapted from website
return P { P(pattern) + 1 * V(1) }
@@ -421,7 +475,10 @@ function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sor
end
end
-function lpeg.finder(lst,makefunction)
+-- local pattern1 = P(1-P(pattern))^0 * P(pattern) : test for not nil
+-- local pattern2 = (P(pattern) * Cc(true) + P(1))^0 : test for true (could be faster, but not much)
+
+function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'patternless finds'
local pattern
if type(lst) == "table" then
pattern = P(false)
@@ -437,7 +494,12 @@ function lpeg.finder(lst,makefunction)
else
pattern = P(lst)
end
- pattern = (1-pattern)^0 * pattern
+ if isutf then
+-- pattern = ((utf8char or 1)-pattern)^0 * pattern
+ pattern = ((utf8char or 1)-pattern)^0 * pattern
+ else
+ pattern = (1-pattern)^0 * pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -456,8 +518,8 @@ local splitters_f, splitters_s = { }, { }
function lpeg.firstofsplit(separator) -- always return value
local splitter = splitters_f[separator]
if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
+ local pattern = P(separator)
+ splitter = C((1 - pattern)^0)
splitters_f[separator] = splitter
end
return splitter
@@ -466,13 +528,35 @@ end
function lpeg.secondofsplit(separator) -- nil if not split
local splitter = splitters_s[separator]
if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
+ local pattern = P(separator)
+ splitter = (1 - pattern)^0 * pattern * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+local splitters_s, splitters_p = { }, { }
+
+function lpeg.beforesuffix(separator) -- nil if nothing but empty is ok
+ local splitter = splitters_s[separator]
+ if not splitter then
+ local pattern = P(separator)
+ splitter = C((1 - pattern)^0) * pattern * endofstring
splitters_s[separator] = splitter
end
return splitter
end
+function lpeg.afterprefix(separator) -- nil if nothing but empty is ok
+ local splitter = splitters_p[separator]
+ if not splitter then
+ local pattern = P(separator)
+ splitter = pattern * C(anything^0)
+ splitters_p[separator] = splitter
+ end
+ return splitter
+end
+
function lpeg.balancer(left,right)
left, right = P(left), P(right)
return P { left * ((1 - left - right) + V(1))^0 * right }
@@ -832,9 +916,9 @@ end
-- moved here (before util-str)
-local digit = R("09")
-local period = P(".")
-local zero = P("0")
+----- digit = R("09")
+----- period = P(".")
+----- zero = P("0")
local trailingzeros = zero^0 * -digit -- suggested by Roberto R
local case_1 = period * trailingzeros / ""
local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
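
A usage sketch of the two new cached splitters defined above:

local strip = lpeg.beforesuffix(".lua")
local tail  = lpeg.afterprefix("l-")
print(lpeg.match(strip,"l-lpeg.lua"))  -- l-lpeg
print(lpeg.match(tail,"l-lpeg.lua"))   -- lpeg.lua
print(lpeg.match(strip,"l-lpeg"))      -- nil, the suffix is mandatory
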
diff --git a/Master/texmf-dist/tex/context/base/l-lua.lua b/Master/texmf-dist/tex/context/base/l-lua.lua
index fc05afa6714..9565f484a2a 100644
--- a/Master/texmf-dist/tex/context/base/l-lua.lua
+++ b/Master/texmf-dist/tex/context/base/l-lua.lua
@@ -6,6 +6,17 @@ if not modules then modules = { } end modules ['l-lua'] = {
license = "see context related readme files"
}
+-- potential issues with 5.3:
+
+-- i'm not sure yet if the int/float change is good for luatex
+
+-- math.min
+-- math.max
+-- tostring
+-- tonumber
+-- utf.*
+-- bit32
+
-- compatibility hacks and helpers
local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
@@ -148,3 +159,9 @@ function optionalrequire(...)
return result
end
end
+
+-- nice for non ascii scripts (this might move):
+
+if lua then
+ lua.mask = load([[τεχ = 1]]) and "utf" or "ascii"
+end
diff --git a/Master/texmf-dist/tex/context/base/l-os.lua b/Master/texmf-dist/tex/context/base/l-os.lua
index 05ca0acdc6a..1dff79cd3f3 100644
--- a/Master/texmf-dist/tex/context/base/l-os.lua
+++ b/Master/texmf-dist/tex/context/base/l-os.lua
@@ -127,11 +127,17 @@ function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
local handle = io.popen(command,"r")
- return handle and handle:read("*all") or ""
+ if handle then
+ local result = handle:read("*all") or ""
+ handle:close()
+ return result
+ else
+ return ""
+ end
end
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
+ if find(os.getenv("PATH"),";",1,true) then
io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
else
io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
@@ -172,26 +178,27 @@ if not os.times then -- ?
end
end
-os.gettimeofday = os.gettimeofday or os.clock
-local startuptime = os.gettimeofday()
+local gettimeofday = os.gettimeofday or os.clock
+os.gettimeofday = gettimeofday
+
+local startuptime = gettimeofday()
function os.runtime()
- return os.gettimeofday() - startuptime
+ return gettimeofday() - startuptime
end
---~ print(os.gettimeofday()-os.time())
---~ os.sleep(1.234)
---~ print (">>",os.runtime())
---~ print(os.date("%H:%M:%S",os.gettimeofday()))
---~ print(os.date("%H:%M:%S",os.time()))
+-- print(os.gettimeofday()-os.time())
+-- os.sleep(1.234)
+-- print (">>",os.runtime())
+-- print(os.date("%H:%M:%S",os.gettimeofday()))
+-- print(os.date("%H:%M:%S",os.time()))
-- no need for function anymore as we have more clever code and helpers now
-- this metatable trickery might as well disappear
-os.resolvers = os.resolvers or { } -- will become private
-
-local resolvers = os.resolvers
+local resolvers = os.resolvers or { }
+os.resolvers = resolvers
setmetatable(os, { __index = function(t,k)
local r = resolvers[k]
@@ -214,6 +221,8 @@ local function guess()
return os.resultof("echo $HOSTTYPE") or ""
end
+-- os.bits = 32 | 64
+
if platform ~= "" then
os.platform = platform
@@ -222,10 +231,14 @@ elseif os.type == "windows" then
-- we could set the variable directly, no function needed here
- function os.resolvers.platform(t,k)
+ -- PROCESSOR_ARCHITECTURE : binary platform
+ -- PROCESSOR_ARCHITEW6432 : OS platform
+
+ function resolvers.platform(t,k)
local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
- platform = "mswin-64"
+ if find(architecture,"AMD64",1,true) then
+ -- platform = "mswin-64"
+ platform = "win64"
else
platform = "mswin"
end
@@ -236,12 +249,12 @@ elseif os.type == "windows" then
elseif name == "linux" then
- function os.resolvers.platform(t,k)
+ function resolvers.platform(t,k)
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform = "linux-64"
- elseif find(architecture,"ppc") then
+ elseif find(architecture,"ppc",1,true) then
platform = "linux-ppc"
else
platform = "linux"
@@ -263,7 +276,7 @@ elseif name == "macosx" then
therefore not permitted to run the 64 bit kernel.
]]--
- function os.resolvers.platform(t,k)
+ function resolvers.platform(t,k)
-- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
-- if architecture == "" then
-- architecture = os.resultof("echo $HOSTTYPE") or ""
@@ -272,9 +285,9 @@ elseif name == "macosx" then
if architecture == "" then
-- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
platform = "osx-intel"
- elseif find(architecture,"i386") then
+ elseif find(architecture,"i386",1,true) then
platform = "osx-intel"
- elseif find(architecture,"x86_64") then
+ elseif find(architecture,"x86_64",1,true) then
platform = "osx-64"
else
platform = "osx-ppc"
@@ -286,9 +299,9 @@ elseif name == "macosx" then
elseif name == "sunos" then
- function os.resolvers.platform(t,k)
+ function resolvers.platform(t,k)
local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
+ if find(architecture,"sparc",1,true) then
platform = "solaris-sparc"
else -- if architecture == 'i86pc'
platform = "solaris-intel"
@@ -300,9 +313,9 @@ elseif name == "sunos" then
elseif name == "freebsd" then
- function os.resolvers.platform(t,k)
+ function resolvers.platform(t,k)
local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
+ if find(architecture,"amd64",1,true) then
platform = "freebsd-amd64"
else
platform = "freebsd"
@@ -314,10 +327,10 @@ elseif name == "freebsd" then
elseif name == "kfreebsd" then
- function os.resolvers.platform(t,k)
+ function resolvers.platform(t,k)
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform = "kfreebsd-amd64"
else
platform = "kfreebsd-i386"
@@ -333,7 +346,7 @@ else
-- os.setenv("MTX_PLATFORM",platform)
-- os.platform = platform
- function os.resolvers.platform(t,k)
+ function resolvers.platform(t,k)
local platform = "linux"
os.setenv("MTX_PLATFORM",platform)
os.platform = platform
@@ -342,6 +355,12 @@ else
end
+function resolvers.bits(t,k)
+ local bits = find(os.platform,"64",1,true) and 64 or 32
+ os.bits = bits
+ return bits
+end
+
-- beware, we set the randomseed
-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
@@ -380,31 +399,43 @@ end
local timeformat = format("%%s%s",os.timezone(true))
local dateformat = "!%Y-%m-%d %H:%M:%S"
+local lasttime = nil
+local lastdate = nil
function os.fulltime(t,default)
- t = tonumber(t) or 0
+ t = t and tonumber(t) or 0
if t > 0 then
-- valid time
elseif default then
return default
else
- t = nil
+ t = time()
+ end
+ if t ~= lasttime then
+ lasttime = t
+ lastdate = format(timeformat,date(dateformat,t))
end
- return format(timeformat,date(dateformat,t))
+ return lastdate
end
local dateformat = "%Y-%m-%d %H:%M:%S"
+local lasttime = nil
+local lastdate = nil
function os.localtime(t,default)
- t = tonumber(t) or 0
+ t = t and tonumber(t) or 0
if t > 0 then
-- valid time
elseif default then
return default
else
- t = nil
+ t = time()
end
- return date(dateformat,t)
+ if t ~= lasttime then
+ lasttime = t
+ lastdate = date(dateformat,t)
+ end
+ return lastdate
end
function os.converttime(t,default)
@@ -472,3 +503,60 @@ end
-- print(os.which("inkscape"))
-- print(os.which("gs.exe"))
-- print(os.which("ps2pdf"))
+
+-- These are moved from core-con.lua (as I needed them elsewhere).
+
+local function isleapyear(year)
+ return (year % 400 == 0) or ((year % 100 ~= 0) and (year % 4 == 0))
+end
+
+os.isleapyear = isleapyear
+
+-- nicer:
+--
+-- local days = {
+-- [false] = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 },
+-- [true] = { 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }
+-- }
+--
+-- local function nofdays(year,month)
+-- return days[isleapyear(year)][month]
+-- return month == 2 and isleapyear(year) and 29 or days[month]
+-- end
+--
+-- more efficient:
+
+local days = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }
+
+local function nofdays(year,month)
+ if not month then
+ return isleapyear(year) and 366 or 365
+ else
+ return month == 2 and isleapyear(year) and 29 or days[month]
+ end
+end
+
+os.nofdays = nofdays
+
+function os.weekday(day,month,year)
+ return date("%w",time { year = year, month = month, day = day }) + 1
+end
+
+function os.validdate(year,month,day)
+ -- we assume that all three values are set
+ -- year is always ok, even if lua has a 1970 time limit
+ if month < 1 then
+ month = 1
+ elseif month > 12 then
+ month = 12
+ end
+ if day < 1 then
+ day = 1
+ else
+ local max = nofdays(year,month)
+ if day > max then
+ day = max
+ end
+ end
+ return year, month, day
+end
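
A few sanity checks for the calendar helpers added above (the dates are chosen for illustration only):

print(os.isleapyear(2012))      -- true
print(os.nofdays(2012,2))       -- 29
print(os.nofdays(2013,2))       -- 28
print(os.weekday(1,1,2013))     -- 3, weekdays run from 1 (sunday) to 7
print(os.validdate(2013,2,31))  -- 2013  2  28, the day is clipped to the month
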
diff --git a/Master/texmf-dist/tex/context/base/l-package.lua b/Master/texmf-dist/tex/context/base/l-package.lua
index 579fd3941c6..075fcde25ae 100644
--- a/Master/texmf-dist/tex/context/base/l-package.lua
+++ b/Master/texmf-dist/tex/context/base/l-package.lua
@@ -17,7 +17,7 @@ if not modules then modules = { } end modules ['l-package'] = {
-- -- local mysql = require("luasql.mysql")
local type = type
-local gsub, format = string.gsub, string.format
+local gsub, format, find = string.gsub, string.format, string.find
local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match
@@ -68,6 +68,7 @@ local helpers = package.helpers or {
sequence = {
"already loaded",
"preload table",
+ "qualified path", -- beware, lua itself doesn't handle qualified paths (prepends ./)
"lua extra list",
"lib extra list",
"path specification",
@@ -243,6 +244,23 @@ end
helpers.loadedbypath = loadedbypath
+local function loadedbyname(name,rawname)
+ if find(name,"^/") or find(name,"^[a-zA-Z]:/") then
+ local trace = helpers.trace
+ if trace then
+ helpers.report("qualified name, identifying '%s'",what,name)
+ end
+ if isreadable(name) then
+ if trace then
+ helpers.report("qualified name, '%s' found",what,name)
+ end
+ return loadfile(name)
+ end
+ end
+end
+
+helpers.loadedbyname = loadedbyname
+
methods["already loaded"] = function(name)
return package.loaded[name]
end
@@ -251,8 +269,12 @@ methods["preload table"] = function(name)
return builtin["preload table"](name)
end
+methods["qualified path"]=function(name)
+ return loadedbyname(addsuffix(lualibfile(name),"lua"),name)
+end
+
methods["lua extra list"] = function(name)
- return loadedbypath(addsuffix(lualibfile(name),"lua" ),name,getextraluapaths(),false,"lua")
+ return loadedbypath(addsuffix(lualibfile(name),"lua"),name,getextraluapaths(),false,"lua")
end
methods["lib extra list"] = function(name)
diff --git a/Master/texmf-dist/tex/context/base/l-pdfview.lua b/Master/texmf-dist/tex/context/base/l-pdfview.lua
index 6e0259299ce..6302fd6f6ac 100644
--- a/Master/texmf-dist/tex/context/base/l-pdfview.lua
+++ b/Master/texmf-dist/tex/context/base/l-pdfview.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['l-pdfview'] = {
license = "see context related readme files"
}
+-- Todo: add options in cnf file
+
-- Todo: figure out pdfopen/pdfclose on linux. Calling e.g. okular directly
-- doesn't work in linux when issued from scite as it blocks the editor (no
-- & possible or so). Unfortunately pdfopen keeps changing with not keeping
@@ -15,62 +17,107 @@ if not modules then modules = { } end modules ['l-pdfview'] = {
local format, concat = string.format, table.concat
+local report = logs.reporter("pdfview")
+local replace = utilities.templates.replace
+
pdfview = pdfview or { }
-local opencalls, closecalls, allcalls, runner
+local opencalls -- a table with templates that open a given pdf document
+local closecalls -- a table with templates that close a given pdf document
+local allcalls -- a table with templates that close all open pdf documents
+local runner -- runner function
+local expander -- filename cleanup function
--- this might become template based
+-- maybe spawn/execute spec in calls
if os.type == "windows" then
+ -- os.setenv("path",os.getenv("path") .. ";" .. "c:/data/system/pdf-xchange")
+ -- os.setenv("path",os.getenv("path") .. ";" .. "c:/data/system/sumatrapdf")
+
+ -- start is more flexible as it locates binaries in more places and doesn't lock
+
opencalls = {
- ['default'] = "pdfopen --rxi --file",
- ['acrobat'] = "pdfopen --rxi --file",
- ['fullacrobat'] = "pdfopen --axi --file",
- ['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique', -- todo!
- ['sumatra'] = 'start "test" "c:/data/system/sumatrapdf/sumatrapdf.exe" -reuse-instance',
- ['okular'] = 'start "test" "okular.exe" --unique',
- ['sumatra'] = 'start "test" "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC',
+ ['default'] = [[pdfopen --rxi --file "%filename%"]],
+ ['acrobat'] = [[pdfopen --rxi --file "%filename%"]],
+ ['fullacrobat'] = [[pdfopen --axi --file "%filename%"]],
+ ['okular'] = [[start "test" okular.exe --unique "%filename%"]],
+ ['pdfxcview'] = [[start "test" pdfxcview.exe /A "nolock=yes=OpenParameters" "%filename%"]],
+ ['sumatra'] = [[start "test" sumatrapdf.exe -reuse-instance -bg-color 0xCCCCCC "%filename%"]],
+ ['auto'] = [[start "%filename%"]],
}
closecalls= {
- ['default'] = "pdfclose --file",
- ['acrobat'] = "pdfclose --file",
- ['okular'] = false,
- ['sumatra'] = false,
+ ['default'] = [[pdfclose --file "%filename%"]],
+ ['acrobat'] = [[pdfclose --file "%filename%"]],
+ ['okular'] = false,
+ ['pdfxcview'] = false, -- [[pdfxcview.exe /close:discard "%filename%"]],
+ ['sumatra'] = false,
+ ['auto'] = false,
}
allcalls = {
- ['default'] = "pdfclose --all",
- ['acrobat'] = "pdfclose --all",
- ['okular'] = false,
- ['sumatra'] = false,
+ ['default'] = [[pdfclose --all]],
+ ['acrobat'] = [[pdfclose --all]],
+ ['okular'] = false,
+ ['pdfxcview'] = false,
+ ['sumatra'] = false,
+ ['auto'] = false,
}
- pdfview.method = "acrobat" -- no longer usefull due to green pop up line and clasing reader/full
+ pdfview.method = "acrobat" -- no longer useful due to green pop up line and clashing reader/full
+ -- pdfview.method = "pdfxcview"
pdfview.method = "sumatra"
- runner = function(cmd)
- os.execute(cmd) -- .. " > /null"
+ runner = function(template,variables)
+ local cmd = replace(template,variables)
+ -- cmd = cmd .. " > /null"
+ report("command: %s",cmd)
+ os.execute(cmd)
+ end
+
+ expander = function(name)
+ -- We need to avoid issues with chdir to UNC paths and therefore expand
+ -- the path when the file sits in the current directory. (We could use one of the helpers instead)
+ if file.pathpart(name) == "" then
+ return file.collapsepath(file.join(lfs.currentdir(),name))
+ else
+ return name
+ end
end
else
opencalls = {
- ['default'] = "pdfopen", -- we could pass the default here
- ['okular'] = 'okular --unique'
+ ['default'] = [[pdfopen "%filename%"]],
+ ['okular'] = [[okular --unique "%filename%"]],
+ ['sumatra'] = [[wine "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC "%filename%"]],
+ ['pdfxcview'] = [[wine "pdfxcview.exe" /A "nolock=yes=OpenParameters" "%filename%"]],
+ ['auto'] = [[open "%filename%"]],
}
closecalls= {
- ['default'] = "pdfclose --file",
- ['okular'] = false,
+ ['default'] = [[pdfclose --file "%filename%"]],
+ ['okular'] = false,
+ ['sumatra'] = false,
+ ['auto'] = false,
}
allcalls = {
- ['default'] = "pdfclose --all",
- ['okular'] = false,
+ ['default'] = [[pdfclose --all]],
+ ['okular'] = false,
+ ['sumatra'] = false,
+ ['auto'] = false,
}
pdfview.method = "okular"
+ pdfview.method = "sumatra" -- faster and more complete
- runner = function(cmd)
- os.execute(cmd .. " 1>/dev/null 2>/dev/null &")
+ runner = function(template,variables)
+ local cmd = replace(template,variables)
+ cmd = cmd .. " 1>/dev/null 2>/dev/null &"
+ report("command: %s",cmd)
+ os.execute(cmd)
+ end
+
+ expander = function(name)
+ return name
end
end
@@ -93,8 +140,6 @@ function pdfview.status()
return format("pdfview methods: %s, current method: %s (directives_pdfview_method)",pdfview.methods(),tostring(pdfview.method))
end
--- local openedfiles = { }
-
local function fullname(name)
return file.addsuffix(name,"pdf")
end
@@ -104,10 +149,9 @@ function pdfview.open(...)
if opencall then
local t = { ... }
for i=1,#t do
- local name = fullname(t[i])
+ local name = expander(fullname(t[i]))
if io.exists(name) then
- runner(format('%s "%s"', opencall, name))
- -- openedfiles[name] = true
+ runner(opencall,{ filename = name })
end
end
end
@@ -118,14 +162,10 @@ function pdfview.close(...)
if closecall then
local t = { ... }
for i=1,#t do
- local name = fullname(t[i])
- -- if openedfiles[name] then
- runner(format('%s "%s"', closecall, name))
- -- openedfiles[name] = nil
- -- else
- -- pdfview.closeall()
- -- break
- -- end
+ local name = expander(fullname(t[i]))
+ if io.exists(name) then
+ runner(closecall,{ filename = name })
+ end
end
end
end
@@ -133,13 +173,8 @@ end
function pdfview.closeall()
local allcall = allcalls[pdfview.method]
if allcall then
- runner(format('%s', allcall))
+ runner(allcall)
end
- -- openedfiles = { }
end
---~ pdfview.open("t:/document/show-exa.pdf")
---~ os.sleep(3)
---~ pdfview.close("t:/document/show-exa.pdf")
-
return pdfview
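
Usage is unchanged; with the sumatra method on windows the runner now expands the open template to roughly start "test" sumatrapdf.exe -reuse-instance -bg-color 0xCCCCCC "test.pdf", assuming utilities.templates.replace substitutes the %filename% placeholder:

pdfview.open("test")   -- opens test.pdf, expanded to a full path when it is in the current directory
pdfview.close("test")  -- closes it again, for methods that define a close template
pdfview.closeall()     -- closes all documents, for methods that define an all template
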
diff --git a/Master/texmf-dist/tex/context/base/l-string.lua b/Master/texmf-dist/tex/context/base/l-string.lua
index 77c076cc53e..3b1a0003ff9 100644
--- a/Master/texmf-dist/tex/context/base/l-string.lua
+++ b/Master/texmf-dist/tex/context/base/l-string.lua
@@ -70,6 +70,7 @@ function string.limit(str,n,sentinel) -- not utf proof
end
local stripper = patterns.stripper
+local fullstripper = patterns.fullstripper
local collapser = patterns.collapser
local longtostring = patterns.longtostring
@@ -77,6 +78,10 @@ function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
+
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -91,6 +96,8 @@ end
local pattern = P(" ")^0 * P(-1)
+-- patterns.onlyspaces = pattern
+
function string.is_empty(str)
if str == "" then
return true
diff --git a/Master/texmf-dist/tex/context/base/l-table.lua b/Master/texmf-dist/tex/context/base/l-table.lua
index 9a1b97fff78..d231830eddc 100644
--- a/Master/texmf-dist/tex/context/base/l-table.lua
+++ b/Master/texmf-dist/tex/context/base/l-table.lua
@@ -16,6 +16,9 @@ local lpegmatch, patterns = lpeg.match, lpeg.patterns
local floor = math.floor
-- extra functions, some might go (when not used)
+--
+-- we could serialize using %a but that won't work well because in the code we mostly use
+-- floats and as such we get inequalities e.g. in version comparisons
local stripper = patterns.stripper
@@ -85,6 +88,38 @@ local function sortedkeys(tab)
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if type(key) == "string" then
+ s = s + 1
+ srt[s] = key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return { }
+ end
+end
+
+local function sortedindexonly(tab)
+ if tab then
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if type(key) == "number" then
+ s = s + 1
+ srt[s] = key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return { }
+ end
+end
+
local function sortedhashkeys(tab,cmp) -- fast one
if tab then
local srt, s = { }, 0
@@ -111,8 +146,10 @@ function table.allkeys(t)
return sortedkeys(keys)
end
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
+table.sortedkeys = sortedkeys
+table.sortedhashonly = sortedhashonly
+table.sortedindexonly = sortedindexonly
+table.sortedhashkeys = sortedhashkeys
local function nothing() end
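
A small example of the two new key filters (sortedhashonly keeps string keys, sortedindexonly numeric ones):

local t = { "first", "second", alpha = 1, beta = 2 }
print(table.concat(table.sortedhashonly(t),","))   -- alpha,beta
print(table.concat(table.sortedindexonly(t),","))  -- 1,2
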
@@ -126,10 +163,13 @@ local function sortedhash(t,cmp)
s = sortedkeys(t) -- the robust one
end
local n = 0
+ local m = #s
local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
+ if n < m then
+ n = n + 1
+ local k = s[n]
+ return k, t[k]
+ end
end
return kv, s
else
@@ -343,6 +383,7 @@ local noquotes, hexify, handle, reduce, compact, inline, functions
local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+ 'NaN', 'goto',
}
local function simple_table(t)
@@ -363,12 +404,12 @@ local function simple_table(t)
else
tt[nt] = tostring(v) -- tostring not needed
end
- elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = tostring(v)
elseif tv == "string" then
nt = nt + 1
tt[nt] = format("%q",v)
+ elseif tv == "boolean" then
+ nt = nt + 1
+ tt[nt] = v and "true" or "false"
else
tt = nil
break
@@ -394,7 +435,8 @@ end
-- todo: %g faster on numbers than %s
--- we can speed this up with repeaters and formatters (is indeed faster)
+-- we can speed this up with repeaters and formatters but we haven't defined them
+-- yet
local propername = patterns.propername -- was find(name,"^%a[%w%_]*$")
@@ -420,7 +462,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s[%q]={",depth,name))
end
elseif tn == "boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
+ handle(format("%s[%s]={",depth,name and "true" or "false"))
else
handle(format("%s{",depth))
end
@@ -456,21 +498,21 @@ local function do_serialize(root,name,depth,level,indexed)
--~ if v == root then
-- circular
--~ else
- local t, tk = type(v), type(k)
+ local tv, tk = type(v), type(k)
if compact and first and tk == "number" and k >= first and k <= last then
- if t == "number" then
+ if tv == "number" then
if hexify then
handle(format("%s 0x%04X,",depth,v))
else
handle(format("%s %s,",depth,v)) -- %.99g
end
- elseif t == "string" then
+ elseif tv == "string" then
if reduce and tonumber(v) then
handle(format("%s %s,",depth,v))
else
handle(format("%s %q,",depth,v))
end
- elseif t == "table" then
+ elseif tv == "table" then
if not next(v) then
handle(format("%s {},",depth))
elseif inline then -- and #t > 0
@@ -483,11 +525,11 @@ local function do_serialize(root,name,depth,level,indexed)
else
do_serialize(v,k,depth,level+1,true)
end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
+ elseif tv == "boolean" then
+ handle(format("%s %s,",depth,v and "true" or "false"))
+ elseif tv == "function" then
if functions then
- handle(format('%s load(%q),',depth,dump(v)))
+ handle(format('%s load(%q),',depth,dump(v))) -- maybe strip
else
handle(format('%s "function",',depth))
end
@@ -498,7 +540,7 @@ local function do_serialize(root,name,depth,level,indexed)
if false then
handle(format("%s __p__=nil,",depth))
end
- elseif t == "number" then
+ elseif tv == "number" then
if tk == "number" then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
@@ -507,9 +549,9 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif tk == "boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
else
- handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) -- %.99g
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
@@ -524,7 +566,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
end
end
- elseif t == "string" then
+ elseif tv == "string" then
if reduce and tonumber(v) then
if tk == "number" then
if hexify then
@@ -533,7 +575,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%s,",depth,k,v))
else
@@ -547,14 +589,14 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%q,",depth,k,v))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,v))
else
handle(format("%s [%q]=%q,",depth,k,v))
end
end
- elseif t == "table" then
+ elseif tv == "table" then
if not next(v) then
if tk == "number" then
if hexify then
@@ -563,7 +605,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]={},",depth,k))
end
elseif tk == "boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
+ handle(format("%s [%s]={},",depth,k and "true" or "false"))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={},",depth,k))
else
@@ -579,7 +621,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
elseif tk == "boolean" then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
else
@@ -591,24 +633,24 @@ local function do_serialize(root,name,depth,level,indexed)
else
do_serialize(v,k,depth,level+1)
end
- elseif t == "boolean" then
+ elseif tv == "boolean" then
if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
+ handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
end
- elseif t == "function" then
+ elseif tv == "function" then
if functions then
- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v) -- maybe strip
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) -- maybe strip
if tk == "number" then
if hexify then
handle(format("%s [0x%04X]=load(%q),",depth,k,f))
@@ -616,7 +658,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=load(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=load(%q),",depth,k,f))
else
@@ -631,7 +673,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,tostring(v)))
else
@@ -713,321 +755,10 @@ local function serialize(_handle,root,name,specification) -- handle wins
handle("}")
end
--- -- This is some 20% faster than using format (because formatters are much faster) but
--- -- of course, inlining the format using .. is then again faster .. anyway, as we do
--- -- some pretty printing as well there is not that much to gain unless we make a 'fast'
--- -- ugly variant as well. But, we would have to move the formatter to l-string then.
-
--- local formatters = string.formatters
-
--- local function do_serialize(root,name,level,indexed)
--- if level > 0 then
--- if indexed then
--- handle(formatters["%w{"](level))
--- else
--- local tn = type(name)
--- if tn == "number" then
--- if hexify then
--- handle(formatters["%w[%04H]={"](level,name))
--- else
--- handle(formatters["%w[%s]={"](level,name))
--- end
--- elseif tn == "string" then
--- if noquotes and not reserved[name] and lpegmatch(propername,name) then
--- handle(formatters["%w%s={"](level,name))
--- else
--- handle(formatters["%w[%q]={"](level,name))
--- end
--- elseif tn == "boolean" then
--- handle(formatters["%w[%S]={"](level,name))
--- else
--- handle(formatters["%w{"](level))
--- end
--- end
--- end
--- -- we could check for k (index) being number (cardinal)
--- if root and next(root) then
--- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
--- -- if compact then
--- -- -- NOT: for k=1,#root do (we need to quit at nil)
--- -- for k,v in ipairs(root) do -- can we use next?
--- -- if not first then first = k end
--- -- last = last + 1
--- -- end
--- -- end
--- local first, last = nil, 0
--- if compact then
--- last = #root
--- for k=1,last do
--- if root[k] == nil then
--- last = k - 1
--- break
--- end
--- end
--- if last > 0 then
--- first = 1
--- end
--- end
--- local sk = sortedkeys(root)
--- for i=1,#sk do
--- local k = sk[i]
--- local v = root[k]
--- --~ if v == root then
--- -- circular
--- --~ else
--- local t, tk = type(v), type(k)
--- if compact and first and tk == "number" and k >= first and k <= last then
--- if t == "number" then
--- if hexify then
--- handle(formatters["%w %04H,"](level,v))
--- else
--- handle(formatters["%w %s,"](level,v)) -- %.99g
--- end
--- elseif t == "string" then
--- if reduce and tonumber(v) then
--- handle(formatters["%w %s,"](level,v))
--- else
--- handle(formatters["%w %q,"](level,v))
--- end
--- elseif t == "table" then
--- if not next(v) then
--- handle(formatters["%w {},"](level))
--- elseif inline then -- and #t > 0
--- local st = simple_table(v)
--- if st then
--- handle(formatters["%w { %, t },"](level,st))
--- else
--- do_serialize(v,k,level+1,true)
--- end
--- else
--- do_serialize(v,k,level+1,true)
--- end
--- elseif t == "boolean" then
--- handle(formatters["%w %S,"](level,v))
--- elseif t == "function" then
--- if functions then
--- handle(formatters['%w load(%q),'](level,dump(v)))
--- else
--- handle(formatters['%w "function",'](level))
--- end
--- else
--- handle(formatters["%w %Q,"](level,v))
--- end
--- elseif k == "__p__" then -- parent
--- if false then
--- handle(formatters["%w __p__=nil,"](level))
--- end
--- elseif t == "number" then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%04H,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%s,"](level,k,v)) -- %.99g
--- end
--- elseif tk == "boolean" then
--- if hexify then
--- handle(formatters["%w [%S]=%04H,"](level,k,v))
--- else
--- handle(formatters["%w [%S]=%s,"](level,k,v)) -- %.99g
--- end
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- if hexify then
--- handle(formatters["%w %s=%04H,"](level,k,v))
--- else
--- handle(formatters["%w %s=%s,"](level,k,v)) -- %.99g
--- end
--- else
--- if hexify then
--- handle(formatters["%w [%q]=%04H,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%s,"](level,k,v)) -- %.99g
--- end
--- end
--- elseif t == "string" then
--- if reduce and tonumber(v) then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%s,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%s,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%s,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%s,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%s,"](level,k,v))
--- end
--- else
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%q,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%q,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%q,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%q,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%q,"](level,k,v))
--- end
--- end
--- elseif t == "table" then
--- if not next(v) then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]={},"](level,k))
--- else
--- handle(formatters["%w [%s]={},"](level,k))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]={},"](level,k))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s={},"](level,k))
--- else
--- handle(formatters["%w [%q]={},"](level,k))
--- end
--- elseif inline then
--- local st = simple_table(v)
--- if st then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]={ %, t },"](level,k,st))
--- else
--- handle(formatters["%w [%s]={ %, t },"](level,k,st))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]={ %, t },"](level,k,st))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s={ %, t },"](level,k,st))
--- else
--- handle(formatters["%w [%q]={ %, t },"](level,k,st))
--- end
--- else
--- do_serialize(v,k,level+1)
--- end
--- else
--- do_serialize(v,k,level+1)
--- end
--- elseif t == "boolean" then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%S,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%S,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%S,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%S,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%S,"](level,k,v))
--- end
--- elseif t == "function" then
--- if functions then
--- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
--- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=load(%q),"](level,k,f))
--- else
--- handle(formatters["%w [%s]=load(%q),"](level,k,f))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=load(%q),"](level,k,f))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=load(%q),"](level,k,f))
--- else
--- handle(formatters["%w [%q]=load(%q),"](level,k,f))
--- end
--- end
--- else
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%Q,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%Q,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%Q,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%Q,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%Q,"](level,k,v))
--- end
--- end
--- --~ end
--- end
--- end
--- if level > 0 then
--- handle(formatters["%w}"](level))
--- end
--- end
-
--- local function serialize(_handle,root,name,specification) -- handle wins
--- local tname = type(name)
--- if type(specification) == "table" then
--- noquotes = specification.noquotes
--- hexify = specification.hexify
--- handle = _handle or specification.handle or print
--- reduce = specification.reduce or false
--- functions = specification.functions
--- compact = specification.compact
--- inline = specification.inline and compact
--- if functions == nil then
--- functions = true
--- end
--- if compact == nil then
--- compact = true
--- end
--- if inline == nil then
--- inline = compact
--- end
--- else
--- noquotes = false
--- hexify = false
--- handle = _handle or print
--- reduce = false
--- compact = true
--- inline = true
--- functions = true
--- end
--- if tname == "string" then
--- if name == "return" then
--- handle("return {")
--- else
--- handle(name .. "={")
--- end
--- elseif tname == "number" then
--- if hexify then
--- handle(format("[0x%04X]={",name))
--- else
--- handle("[" .. name .. "]={")
--- end
--- elseif tname == "boolean" then
--- if name then
--- handle("return {")
--- else
--- handle("{")
--- end
--- else
--- handle("t={")
--- end
--- if root then
--- -- The dummy access will initialize a table that has a delayed initialization
--- -- using a metatable. (maybe explicitly test for metatable)
--- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
--- local dummy = root._w_h_a_t_e_v_e_r_
--- root._w_h_a_t_e_v_e_r_ = nil
--- end
--- -- Let's forget about empty tables.
--- if next(root) then
--- do_serialize(root,name,0)
--- end
--- end
--- handle("}")
--- end
+-- A version with formatters is some 20% faster than using format (because formatters are
+-- much faster) but of course, inlining the format using .. is then again faster .. anyway,
+-- as we do some pretty printing as well there is not that much to gain unless we make a
+-- 'fast' ugly variant as well. But, we would have to move the formatter to l-string then.
-- name:
--
@@ -1309,7 +1040,9 @@ function table.print(t,...)
end
end
-setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
+end
-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
@@ -1360,3 +1093,24 @@ function table.sorted(t,...)
sort(t,...)
return t -- still sorts in-place
end
+
+--
+
+function table.values(t,s) -- optional sort flag
+ if t then
+ local values, keys, v = { }, { }, 0
+ for key, value in next, t do
+ if not keys[value] then
+ v = v + 1
+ values[v] = value
+ keys[value] = key
+ end
+ end
+ if s then
+ sort(values)
+ end
+ return values
+ else
+ return { }
+ end
+end
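A short usage sketch of the new table.values helper above (with the duplicate check keyed on the value); the input table is made up, and the second argument requests sorting of the collected values.

local colors = { a = "red", b = "green", c = "red" }
local v = table.values(colors, true)
-- v == { "green", "red" } : duplicate values collapsed, then sorted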
diff --git a/Master/texmf-dist/tex/context/base/l-unicode.lua b/Master/texmf-dist/tex/context/base/l-unicode.lua
index 813ffd54b79..be61f3d73cb 100644
--- a/Master/texmf-dist/tex/context/base/l-unicode.lua
+++ b/Master/texmf-dist/tex/context/base/l-unicode.lua
@@ -6,10 +6,16 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
--- this module will be reorganized
+-- in lua 5.3:
--- todo: utf.sub replacement (used in syst-aux)
+-- utf8.char(···) : concatenated
+-- utf8.charpattern : "[\0-\x7F\xC2-\xF4][\x80-\xBF]*"
+-- utf8.codes(s) : for p, c in utf8.codes(s) do body end
+-- utf8.codepoint(s [, i [, j]])
+-- utf8.len(s [, i])
+-- utf8.offset(s, n [, i])
+-- todo: utf.sub replacement (used in syst-aux)
-- we put these in the utf namespace:
utf = utf or (unicode and unicode.utf8) or { }
@@ -25,7 +31,7 @@ utf.values = utf.values or string.utfvalues
-- string.bytepairs
local type = type
-local char, byte, format, sub = string.char, string.byte, string.format, string.sub
+local char, byte, format, sub, gmatch = string.char, string.byte, string.format, string.sub, string.gmatch
local concat = table.concat
local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp
local lpegmatch, patterns = lpeg.match, lpeg.patterns
@@ -38,13 +44,14 @@ local replacer = lpeg.replacer
local utfvalues = utf.values
local utfgmatch = utf.gmatch -- not always present
-local p_utftype = patterns.utftype
-local p_utfoffset = patterns.utfoffset
-local p_utf8char = patterns.utf8char
-local p_utf8byte = patterns.utf8byte
-local p_utfbom = patterns.utfbom
-local p_newline = patterns.newline
-local p_whitespace = patterns.whitespace
+local p_utftype = patterns.utftype
+local p_utfstricttype = patterns.utfstricttype
+local p_utfoffset = patterns.utfoffset
+local p_utf8char = patterns.utf8char
+local p_utf8byte = patterns.utf8byte
+local p_utfbom = patterns.utfbom
+local p_newline = patterns.newline
+local p_whitespace = patterns.whitespace
if not unicode then
@@ -621,116 +628,273 @@ function utf.magic(f) -- not used
return lpegmatch(p_utftype,str)
end
-local function utf16_to_utf8_be(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*left + right
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
+local utf16_to_utf8_be, utf16_to_utf8_le
+local utf32_to_utf8_be, utf32_to_utf8_le
+
+local utf_16_be_linesplitter = patterns.utfbom_16_be^-1 * lpeg.tsplitat(patterns.utf_16_be_nl)
+local utf_16_le_linesplitter = patterns.utfbom_16_le^-1 * lpeg.tsplitat(patterns.utf_16_le_nl)
+
+-- we have three possibilities:
+
+-- bytepairs: 0.048
+-- gmatch : 0.069
+-- lpeg : 0.089 (match time captures)
+
+if bytepairs then
+
+ -- with a little bit more code we could include the linesplitter
+
+ utf16_to_utf8_be = function(t)
+ if type(t) == "string" then
+ t = lpegmatch(utf_16_be_linesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in bytepairs(t[i]) do
+ if right then
+ local now = 256*left + right
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
end
end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ return t
end
- return t
-end
-local function utf16_to_utf8_le(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
+ utf16_to_utf8_le = function(t)
+ if type(t) == "string" then
+ t = lpegmatch(utf_16_le_linesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in bytepairs(t[i]) do
+ if right then
+ local now = 256*right + left
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
+ end
+ end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
+ return t
end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*right + left
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
+
+ utf32_to_utf8_be = function(t)
+ if type(t) == "string" then
+ t = lpegmatch(utflinesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, -1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more < 0 then
+ more = 256*256*256*a + 256*256*b
+ else
+ r = r + 1
+ result[r] = utfchar(more + 256*a + b)
+ more = -1
+ end
else
- r = r + 1
- result[r] = utfchar(now)
+ break
end
end
+ t[i] = concat(result,"",1,r)
end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ return t
end
- return t
-end
-local function utf32_to_utf8_be(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*256*256*a + 256*256*b
+ utf32_to_utf8_le = function(t)
+ if type(t) == "string" then
+ t = lpegmatch(utflinesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, -1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more < 0 then
+ more = 256*b + a
+ else
+ r = r + 1
+ result[r] = utfchar(more + 256*256*256*b + 256*256*a)
+ more = -1
+ end
else
- r = r + 1
- result[t] = utfchar(more + 256*a + b)
- more = -1
+ break
end
- else
- break
end
+ t[i] = concat(result,"",1,r)
end
- t[i] = concat(result,"",1,r)
+ return t
end
- return t
-end
-local function utf32_to_utf8_le(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
+else
+
+ utf16_to_utf8_be = function(t)
+ if type(t) == "string" then
+ t = lpegmatch(utf_16_be_linesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in gmatch(t[i],"(.)(.)") do
+ if left == "\000" then -- experiment
+ r = r + 1
+ result[r] = utfchar(byte(right))
+ elseif right then
+ local now = 256*byte(left) + byte(right)
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
+ end
+ end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
+ return t
end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*b + a
- else
+
+ utf16_to_utf8_le = function(t)
+ if type(t) == "string" then
+ t = lpegmatch(utf_16_le_linesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in gmatch(t[i],"(.)(.)") do
+ if right == "\000" then
r = r + 1
- result[t] = utfchar(more + 256*256*256*b + 256*256*a)
- more = -1
+ result[r] = utfchar(byte(left))
+ elseif right then
+ local now = 256*byte(right) + byte(left)
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
end
- else
- break
end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
end
- t[i] = concat(result,"",1,r)
+ return t
end
- return t
+
+ utf32_to_utf8_le = function() return { } end -- never used anyway
+ utf32_to_utf8_be = function() return { } end -- never used anyway
+
+ -- the next one is slightly slower
+
+ -- local result, lines, r, more = { }, { }, 0, 0
+ --
+ -- local simple = Cmt(
+ -- C(1) * C(1), function(str,p,left,right)
+ -- local now = 256*byte(left) + byte(right)
+ -- if more > 0 then
+ -- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ -- more = 0
+ -- r = r + 1
+ -- result[r] = utfchar(now)
+ -- elseif now >= 0xD800 and now <= 0xDBFF then
+ -- more = now
+ -- else
+ -- r = r + 1
+ -- result[r] = utfchar(now)
+ -- end
+ -- return p
+ -- end
+ -- )
+ --
+ -- local complex = Cmt(
+ -- C(1) * C(1), function(str,p,left,right)
+ -- local now = 256*byte(left) + byte(right)
+ -- if more > 0 then
+ -- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ -- more = 0
+ -- r = r + 1
+ -- result[r] = utfchar(now)
+ -- elseif now >= 0xD800 and now <= 0xDBFF then
+ -- more = now
+ -- else
+ -- r = r + 1
+ -- result[r] = utfchar(now)
+ -- end
+ -- return p
+ -- end
+ -- )
+ --
+ -- local lineend = Cmt (
+ -- patterns.utf_16_be_nl, function(str,p)
+ -- lines[#lines+1] = concat(result,"",1,r)
+ -- r, more = 0, 0
+ -- return p
+ -- end
+ -- )
+ --
+ -- local be_1 = patterns.utfbom_16_be^-1 * (simple + complex)^0
+ -- local be_2 = patterns.utfbom_16_be^-1 * (lineend + simple + complex)^0
+ --
+ -- utf16_to_utf8_be = function(t)
+ -- if type(t) == "string" then
+ -- local s = t
+ -- lines, r, more = { }, 0, 0
+ -- lpegmatch(be_2,s)
+ -- if r > 0 then
+ -- lines[#lines+1] = concat(result,"",1,r)
+ -- end
+ -- result = { }
+ -- return lines
+ -- else
+ -- for i=1,#t do
+ -- r, more = 0, 0
+ -- lpegmatch(be_1,t[i])
+ -- t[i] = concat(result,"",1,r)
+ -- end
+ -- result = { }
+ -- return t
+ -- end
+ -- end
+
end
-utf.utf32_to_utf8_be = utf32_to_utf8_be
-utf.utf32_to_utf8_le = utf32_to_utf8_le
-utf.utf16_to_utf8_be = utf16_to_utf8_be
utf.utf16_to_utf8_le = utf16_to_utf8_le
+utf.utf16_to_utf8_be = utf16_to_utf8_be
+utf.utf32_to_utf8_le = utf32_to_utf8_le
+utf.utf32_to_utf8_be = utf32_to_utf8_be
function utf.utf8_to_utf8(t)
return type(t) == "string" and lpegmatch(utflinesplitter,t) or t
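A small decoding sketch for the bytepairs based converters above; the byte string is written out by hand (big endian UTF-16 for "Héllo") purely for illustration.

local s = "\000H\000\233\000l\000l\000o" -- 00 48 00 E9 00 6C 00 6C 00 6F
local lines = utf.utf16_to_utf8_be(s)    -- the input string is split into lines first
print(lines[1])                          -- Héllo (utf-8)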
@@ -777,14 +941,30 @@ end
local _, l_remap = utf.remapper(little)
local _, b_remap = utf.remapper(big)
-function utf.utf8_to_utf16(str,littleendian)
- if littleendian then
- return char(255,254) .. lpegmatch(l_remap,str)
+function utf.utf8_to_utf16_be(str,nobom)
+ if nobom then
+ return lpegmatch(b_remap,str)
else
return char(254,255) .. lpegmatch(b_remap,str)
end
end
+function utf.utf8_to_utf16_le(str,nobom)
+ if nobom then
+ return lpegmatch(l_remap,str)
+ else
+ return char(255,254) .. lpegmatch(l_remap,str)
+ end
+end
+
+function utf.utf8_to_utf16(str,littleendian,nobom)
+ if littleendian then
+ return utf.utf8_to_utf16_le(str,nobom)
+ else
+ return utf.utf8_to_utf16_be(str,nobom)
+ end
+end
+
-- function utf.tocodes(str,separator) -- can be sped up with an lpeg
-- local t, n = { }, 0
-- for u in utfvalues(str) do
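A quick sketch of the new endian specific encoders above; utf.utf8_to_utf16 keeps its old interface and now simply dispatches to them.

local be = utf.utf8_to_utf16_be("Héllo")       -- big endian, prefixed with the fe ff bom
local le = utf.utf8_to_utf16_le("Héllo", true) -- little endian, nobom flag set
local v  = utf.utf8_to_utf16("Héllo", true)    -- same as the le call but with the ff fe bom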
@@ -811,6 +991,22 @@ function utf.xstring(s)
return format("0x%05X",type(s) == "number" and s or utfbyte(s))
end
+function utf.toeight(str)
+ if not str then
+ return nil
+ end
+ local utftype = lpegmatch(p_utfstricttype,str)
+ if utftype == "utf-8" then
+ return sub(str,4)
+ elseif utftype == "utf-16-le" then
+ return utf16_to_utf8_le(str)
+ elseif utftype == "utf-16-be" then
+ return utf16_to_utf8_be(str)
+ else
+ return str
+ end
+end
+
--
local p_nany = p_utf8char / ""
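A usage sketch of utf.toeight above, assuming the strict type sniffer recognizes the byte order marks in these handmade strings; note that utf-16 input comes back as a table of utf-8 lines while utf-8 input comes back as a plain string.

local s8  = "\239\187\191hello"  -- ef bb bf bom followed by "hello"
local s16 = "\254\255\000h\000i" -- fe ff bom followed by "hi" in utf-16 be
print(utf.toeight(s8))           -- hello
print(utf.toeight(s16)[1])       -- hi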
@@ -940,3 +1136,13 @@ if not utf.values then
string.utfvalues = utf.values
end
+
+function utf.chrlen(u) -- u is number
+ return
+ (u < 0x80 and 1) or
+ (u < 0xE0 and 2) or
+ (u < 0xF0 and 3) or
+ (u < 0xF8 and 4) or
+ (u < 0xFC and 5) or
+ (u < 0xFE and 6) or 0
+end
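The helper above maps the leading byte of a UTF-8 sequence to the length of that sequence; a few handmade checks:

print(utf.chrlen(0x41)) -- 1 : ascii "A"
print(utf.chrlen(0xC3)) -- 2 : leading byte of a two byte sequence (e.g. "é")
print(utf.chrlen(0xE2)) -- 3 : leading byte of a three byte sequence
print(utf.chrlen(0xF0)) -- 4 : leading byte of a four byte sequence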
diff --git a/Master/texmf-dist/tex/context/base/l-url.lua b/Master/texmf-dist/tex/context/base/l-url.lua
index 4624a05070d..7bb73125493 100644
--- a/Master/texmf-dist/tex/context/base/l-url.lua
+++ b/Master/texmf-dist/tex/context/base/l-url.lua
@@ -26,6 +26,8 @@ local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replac
-- | ___________|____________ |
-- / \ / \ |
-- urn:example:animal:ferret:nose interpretable as extension
+--
+-- also nice: http://url.spec.whatwg.org/ (maybe some day ...)
url = url or { }
local url = url
@@ -43,7 +45,7 @@ local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
-local escaped = (plus / " ") + escapedchar
+local escaped = (plus / " ") + escapedchar -- so no loc://foo++.tex
local noslash = P("/") / ""
@@ -79,12 +81,18 @@ setmetatable(escapes, { __index = function(t,k)
return v
end })
-local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
-local unescaper = Cs((escapedchar + 1)^0)
+local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
+local unescaper = Cs((escapedchar + 1)^0)
+local getcleaner = Cs((P("+++") / "%%2B" + P("+") / "%%20" + P(1))^1)
+
+lpegpatterns.urlunescaped = escapedchar
+lpegpatterns.urlescaper = escaper
+lpegpatterns.urlunescaper = unescaper
+lpegpatterns.urlgetcleaner = getcleaner
-lpegpatterns.urlunescaped = escapedchar
-lpegpatterns.urlescaper = escaper
-lpegpatterns.urlunescaper = unescaper
+function url.unescapeget(str)
+ return lpegmatch(getcleaner,str)
+end
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
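A small sketch of the new get cleaner above: a literal plus in a query has to survive as %2B while a single plus still means a space; the sample string is made up.

local cleaned = url.unescapeget("foo+++bar+baz")
print(cleaned) -- foo%2Bbar%20baz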
@@ -183,7 +191,11 @@ local function hashed(str) -- not yet ok (/test?test)
return s
end
--- inspect(hashed("template://test"))
+-- inspect(hashed("template:///test"))
+-- inspect(hashed("template:///test++.whatever"))
+-- inspect(hashed("template:///test%2B%2B.whatever"))
+-- inspect(hashed("template:///test%x.whatever"))
+-- inspect(hashed("tem%2Bplate:///test%x.whatever"))
-- Here we assume:
--
diff --git a/Master/texmf-dist/tex/context/base/lang-def.mkiv b/Master/texmf-dist/tex/context/base/lang-def.mkiv
index 9f84e90f01c..5c1d6de9cbf 100644
--- a/Master/texmf-dist/tex/context/base/lang-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-def.mkiv
@@ -219,10 +219,9 @@
\c!rightquote=\upperrightsinglesixquote,
\c!leftquotation=\lowerleftdoubleninequote,
\c!rightquotation=\upperrightdoublesixquote,
- \c!date={\v!day,\fourperemspace,\v!month,\space,\v!year}
+ \c!date={\v!day,{.\fourperemspace},\v!month,\space,\v!year},
\s!lefthyphenmin=2,
- \s!righthyphenmin=3
-]
+ \s!righthyphenmin=3]
\installlanguage
[\s!sk]
@@ -235,10 +234,9 @@
\c!rightquote=\upperrightsinglesixquote,
\c!leftquotation=\lowerleftdoubleninequote,
\c!rightquotation=\upperrightdoublesixquote,
- \c!date={\v!day,\fourperemspace,\v!month,\space,\v!year}
+ \c!date={\v!day,{.\fourperemspace},\v!month,\space,\v!year},
\s!lefthyphenmin=2,
- \s!righthyphenmin=3
-]
+ \s!righthyphenmin=3]
\installlanguage
[\s!hr]
@@ -271,7 +269,7 @@
\installlanguage [\s!slovak] [\s!sk]
\installlanguage [\s!croatian] [\s!hr]
\installlanguage [\s!slovenian] [\s!sl]
-\installlanguage [slovene] [\s!sl] % both possible (mojca: still needed?)
+\installlanguage [slovene] [\s!sl] % both possible (mojca: still needed?)
\def\doconvertsloveniancharacters{\dodoconvertcharacters{25}}
@@ -400,7 +398,7 @@
\c!rightquote=\upperrightsingleninequote,
\c!leftquotation=\upperleftdoublesixquote,
\c!rightquotation=\upperrightdoubleninequote,
- \c!date={\v!year,\space,\v!month,\space,\v!day}
+ \c!date={\v!year,\space,\v!month,\space,\v!day},
\s!patterns=\s!tk,
\s!lefthyphenmin=1,
\s!righthyphenmin=2]
@@ -636,6 +634,10 @@
\c!rightquotation=\upperrightdoubleninequote,
\c!date={\v!year,\space,\v!month,\space,\v!day}]
+\installlanguage[\s!pt-br][\c!default=\s!pt] % Brazil
+\installlanguage[\s!es-es][\c!default=\s!es] % Spain
+\installlanguage[\s!es-la][\c!default=\s!es] % Latin America
+
\installlanguage
[\s!ro]
[\c!spacing=\v!packed,
@@ -698,6 +700,26 @@
\installlanguage [\s!thai] [\s!th]
+%D Malayalam (needs to be checked)
+
+\installlanguage
+ [\s!ml]
+ [\c!spacing=\v!broad,
+ \c!leftsentence=\emdash,
+ \c!rightsentence=\emdash,
+ \c!leftsubsentence=\emdash,
+ \c!rightsubsentence=\emdash,
+ \c!leftquote=\upperleftsinglesixquote,
+ \c!rightquote=\upperrightsingleninequote,
+ \c!leftquotation=\upperleftdoublesixquote,
+ \c!rightquotation=\upperrightdoubleninequote,
+ \c!date={\v!month,\space,\v!day,{,\space},\v!year},
+ \s!patterns=\s!ml,
+ \s!lefthyphenmin=2,
+ \s!righthyphenmin=3]
+
+\installlanguage [\s!malayalam] [\s!ml]
+
%D Todo: generate this one from languages.data
\installlanguage[nld][\s!nl]
diff --git a/Master/texmf-dist/tex/context/base/lang-ini.lua b/Master/texmf-dist/tex/context/base/lang-ini.lua
index 64221325338..a9f428caa97 100644
--- a/Master/texmf-dist/tex/context/base/lang-ini.lua
+++ b/Master/texmf-dist/tex/context/base/lang-ini.lua
@@ -20,7 +20,6 @@ local type, tonumber = type, tonumber
local utfbyte = utf.byte
local format, gsub = string.format, string.gsub
local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs
-local lpegmatch = lpeg.match
local settings_to_array = utilities.parsers.settings_to_array
diff --git a/Master/texmf-dist/tex/context/base/lang-ini.mkiv b/Master/texmf-dist/tex/context/base/lang-ini.mkiv
index 1297fe23ebc..4ed7839bddc 100644
--- a/Master/texmf-dist/tex/context/base/lang-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-ini.mkiv
@@ -374,15 +374,24 @@
\fi
\lang_basics_synchronize_min_max}
-\unexpanded\def\nohyphens % % % % % not clever, we still hyphenate but supress application
+% \unexpanded\def\nohyphens % % % % % not clever, we still hyphenate but supress application
+% {\ifx\dohyphens\relax
+% \unexpanded\edef\dohyphens
+% {\hyphenpenalty \the\hyphenpenalty
+% \exhyphenpenalty\the\exhyphenpenalty
+% \relax}%
+% \fi
+% \hyphenpenalty \plustenthousand
+% \exhyphenpenalty\plustenthousand}
+
+\unexpanded\def\nohyphens % nicer for url's
{\ifx\dohyphens\relax
\unexpanded\edef\dohyphens
- {\hyphenpenalty \the\hyphenpenalty
- \exhyphenpenalty\the\exhyphenpenalty
- \relax}%
+ {\hyphenminoffset\the\hyphenminoffset\relax
+ \lang_basics_synchronize_min_max}%
\fi
- \hyphenpenalty \plustenthousand
- \exhyphenpenalty\plustenthousand}
+ \hyphenminoffset\plusthousand
+ \lang_basics_synchronize_min_max}
\let\dohyphens\relax
@@ -470,7 +479,7 @@
\lang_basics_switch_asked}
\unexpanded\def\language
- {\doifnextoptionalelse\lang_basics_set_current\normallanguage}
+ {\doifnextoptionalcselse\lang_basics_set_current\normallanguage}
\newcount\mainlanguagenumber
@@ -492,7 +501,11 @@
\fi
\mainlanguagenumber\normallanguage}
-%D New (see nomarking and nolist):
+\appendtoks
+ \normallanguage\mainlanguagenumber
+\to \everybeforepagebody
+
+%D Used at all?
\def\splitsequence#1#2%
{\doifelse{#1}\v!no{#2}{\doifelse{#1}\v!yes{\languageparameter\c!limittext}{#1}}}
diff --git a/Master/texmf-dist/tex/context/base/lang-lab.mkiv b/Master/texmf-dist/tex/context/base/lang-lab.mkiv
index 1ddb44cbb79..7dcaaecb414 100644
--- a/Master/texmf-dist/tex/context/base/lang-lab.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-lab.mkiv
@@ -94,8 +94,10 @@
\csname\??label\currentlabelcategory#1:##1:##2\endcsname
\else\ifcsname\??label#1:##1:##2\endcsname
\csname\??label#1:##1:##2\endcsname
- \else\ifcsname\??language#4\s!default\endcsname
- \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
+% \else\ifcsname\??language#4\s!default\endcsname
+% \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
+ \else\ifcsname\??language##1\s!default\endcsname
+ \expandafter#5\csname\??language##1\s!default\endcsname{##2}%
\else\ifcsname\??label\currentlabelcategory#1:##2\endcsname
\csname\??label\currentlabelcategory#1:##2\endcsname
\else\ifcsname\??label#1:##2\endcsname
@@ -178,7 +180,7 @@
\grabuntil{stop#1text}\lang_labels_text_prefix_start_indeed}
\def\lang_labels_text_prefix_start_indeed#1% text (not special checking done here yet, only for long texts anyway)
- {\expandafter\edef\csname\??label\currenttextprefixclass:\currenttextprefixtag:\currenttextprefixname\endcsname{{\ctxlua{context(string.strip(\!!bs#1\!!es))}}\empty}}
+ {\expandafter\edef\csname\??label\currenttextprefixclass:\currenttextprefixtag:\currenttextprefixname\endcsname{{\ctxcommand{strip(\!!bs#1\!!es)}}\empty}}
\def\lang_labels_text_prefix_setup[#1][#2]%
{\ifsecondargument
diff --git a/Master/texmf-dist/tex/context/base/lang-mis.mkiv b/Master/texmf-dist/tex/context/base/lang-mis.mkiv
index 4d8b8e08aeb..0c4bc3ac48e 100644
--- a/Master/texmf-dist/tex/context/base/lang-mis.mkiv
+++ b/Master/texmf-dist/tex/context/base/lang-mis.mkiv
@@ -157,9 +157,9 @@
%D In the main \CONTEXT\ modules these can be tuned by a setup
%D command. Watch the (maybe) better looking compound hyphen.
-\ifx\compoundhyphen \undefined \def\compoundhyphen {\hbox{-\kern-.25ex-}} \fi
-\ifx\beginofsubsentence\undefined \def\beginofsubsentence{\hbox{---}} \fi
-\ifx\endofsubsentence \undefined \def\endofsubsentence {\hbox{---}} \fi
+\ifx\compoundhyphen \undefined \unexpanded\def\compoundhyphen {\hbox{-\kern-.25ex-}} \fi
+\ifx\beginofsubsentence\undefined \unexpanded\def\beginofsubsentence{\hbox{\emdash}} \fi
+\ifx\endofsubsentence \undefined \unexpanded\def\endofsubsentence {\hbox{\emdash}} \fi
%D The last two variables are needed for subsentences
%D |<|like this one|>| which we did not yet mention.
@@ -168,10 +168,11 @@
%D compound characters like |-| or || to be separated from the
%D words. \TEX\ hackers will recognise the next two macro's:
-\ifx\prewordbreak \undefined \def\prewordbreak {\penalty\plustenthousand\hskip\zeropoint\relax} \fi
-%ifx\postwordbreak\undefined \def\postwordbreak {\penalty\zerocount \prewordbreak } \fi
-\ifx\postwordbreak\undefined \def\postwordbreak {\penalty\zerocount \hskip\zeropoint\relax} \fi
-\ifx\hspaceamount \undefined \def\hspaceamount#1#2{.16667em} \fi % language specific
+\ifx\prewordbreak \undefined \unexpanded\def\prewordbreak {\penalty\plustenthousand\hskip\zeropoint\relax} \fi
+\ifx\postwordbreak\undefined \unexpanded\def\postwordbreak {\penalty\zerocount \hskip\zeropoint\relax} \fi
+\ifx\hspaceamount \undefined \def\hspaceamount#1#2{.16667\emwidth} \fi % language specific
+
+\unexpanded\def\permithyphenation{\ifhmode\prewordbreak\fi} % doesn't remove spaces
%D \macros
%D {beginofsubsentencespacing,endofsubsentencespacing}
@@ -201,21 +202,21 @@
\installcorenamespace{discretionarymath}
\installcorenamespace{discretionaryboth}
-\def\installdiscretionary#1#2%
+\unexpanded\def\installdiscretionary#1#2%
{\setevalue{\??discretionarymath\detokenize{#1}}{\detokenize{#1}}% ?
\setvalue {\??discretionarytext\detokenize{#1}}{#2}%
\setvalue {\??discretionaryboth\detokenize{#1}}{\lang_discretionaries_command#1}%
\scratchcounter\expandafter`\detokenize{#1}%
\expandafter\uedcatcodecommand\expandafter\ctxcatcodes\expandafter\scratchcounter\csname\??discretionaryboth\detokenize{#1}\endcsname}
-\def\handlemathmodediscretionary#1{\executeifdefined{\??discretionarymath\detokenize{#1}}\donothing}
-\def\handletextmodediscretionary#1{\executeifdefined{\??discretionarytext\detokenize{#1}}\donothing}
+\unexpanded\def\handlemathmodediscretionary#1{\executeifdefined{\??discretionarymath\detokenize{#1}}\donothing}
+\unexpanded\def\handletextmodediscretionary#1{\executeifdefined{\??discretionarytext\detokenize{#1}}\donothing}
-\def\installdiscretionaries#1#2{\writestatus\m!system{use \string \installdiscretionary}} % obsolete
+\unexpanded\def\installdiscretionaries#1#2{\writestatus\m!system{use \string \installdiscretionary}} % obsolete
\setnewconstant\discretionarymode\plusone
-\def\ignorediscretionaries
+\unexpanded\def\ignorediscretionaries
{\discretionarymode\zerocount}
\def\lang_discretionaries_command
@@ -277,7 +278,7 @@
\futurelet\nextnext\next}%
\next}
-\def\activedododotextmodediscretionary#1#2%
+\unexpanded\def\activedododotextmodediscretionary#1#2%
{\edef\discretionarytoken{\detokenize{#2}}%
\def\textmodediscretionary{\handletextmodediscretionary{#1}}%
\lang_discretionaries_check_after
@@ -329,7 +330,7 @@
%D In those situations where the nature of characters is
%D less predictable, we can use the more direct approach:
-\def\directdiscretionary
+\unexpanded\def\directdiscretionary
{\csname
\ifcase\discretionarymode
\strippedcsname\lang_discretionaries_process_none
@@ -338,7 +339,7 @@
\fi
\endcsname}
-\def\indirectdiscretionary
+\unexpanded\def\indirectdiscretionary
{\csname
\ifcase\discretionarymode
\strippedcsname\lang_discretionaries_process_none
@@ -352,7 +353,7 @@
\let\textmodediscretionary\compoundhyphen
\executeifdefined{\??discretionaryaction\discretionarytoken}{\indirectdiscretionary{#1}}}
-\unexpanded\def\lang_discretionaries_process_indirect#1%
+\unexpanded\def\lang_discretionaries_process_indirect#1%
{\prewordbreak\discretionary{\hbox{#1}}{}{\hbox{#1}}\allowbreak\postwordbreak}
\unexpanded\def\definetextmodediscretionary #1
diff --git a/Master/texmf-dist/tex/context/base/lang-rep.lua b/Master/texmf-dist/tex/context/base/lang-rep.lua
new file mode 100644
index 00000000000..02eb59f487c
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/lang-rep.lua
@@ -0,0 +1,282 @@
+if not modules then modules = { } end modules ['lang-rep'] = {
+ version = 1.001,
+ comment = "companion to lang-rep.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A BachoTeX 2013 experiment, probably not that useful. Eventually I used a simpler,
+-- more generic example. I'm sure no one ever notices or even needs this code.
+--
+-- As a follow-up to a question by Alan about special treatment of dropped caps I wonder
+-- if I can make this one more clever (probably in a few more dev steps). For instance
+-- injecting nodes or replacing nodes. It's a prelude to a kind of lpeg for nodes,
+-- although (given experiences so far) we don't really need that. After all, each problem
+-- is somewhat unique.
+
+local type = type
+local utfbyte, utfsplit = utf.byte, utf.split
+local P, C, U, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.match
+local find = string.find
+
+local grouped = P("{") * ( Ct((U/utfbyte-P("}"))^1) + Cc(false) ) * P("}")-- grouped
+local splitter = Ct((Ct(Cc("discretionary") * grouped * grouped * grouped) + U/utfbyte)^1)
+
+local trace_replacements = false trackers.register("languages.replacements", function(v) trace_replacements = v end)
+local trace_detail = false trackers.register("languages.replacements.detail", function(v) trace_detail = v end)
+
+local report_replacement = logs.reporter("languages","replacements")
+
+local glyph_code = nodes.nodecodes.glyph
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getattr = nuts.getattr
+local getid = nuts.getid
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local flush_list = nuts.flush_list
+local insert_after = nuts.insert_after
+
+local nodepool = nuts.pool
+local new_glyph = nodepool.glyph
+local new_disc = nodepool.disc
+
+local texsetattribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+
+local v_reset = interfaces.variables.reset
+
+local replacements = languages.replacements or { }
+languages.replacements = replacements
+
+local a_replacements = attributes.private("replacements")
+
+local lists = { }
+local last = 0
+local trees = { }
+
+table.setmetatableindex(lists,function(lists,name)
+ last = last + 1
+ local list = { }
+ local data = { name = name, list = list, attribute = last }
+ lists[last] = data
+ lists[name] = data
+ trees[last] = list
+ return data
+end)
+
+-- todo: glue kern
+
+local function add(root,word,replacement)
+ local list = utfsplit(word,true)
+ local size = #list
+ for i=1,size do
+ local l = utfbyte(list[i])
+ if not root[l] then
+ root[l] = { }
+ end
+ if i == size then
+ -- local newlist = utfsplit(replacement,true)
+ -- for i=1,#newlist do
+ -- newlist[i] = utfbyte(newlist[i])
+ -- end
+ local special = find(replacement,"{",1,true)
+ local newlist = lpegmatch(splitter,replacement)
+ --
+ root[l].final = {
+ word = word,
+ replacement = replacement,
+ oldlength = size,
+ newcodes = newlist,
+ special = special,
+ }
+ end
+ root = root[l]
+ end
+end
+
+function replacements.add(category,word,replacement)
+ local root = lists[category].list
+ if type(word) == "table" then
+ for word, replacement in next, word do
+ add(root,word,replacement)
+ end
+ else
+ add(root,word,replacement or "")
+ end
+end
+
+local function hit(a,head)
+ local tree = trees[a]
+ if tree then
+ local root = tree[getchar(head)]
+ if root then
+ local current = getnext(head)
+ local lastrun = false
+ local lastfinal = false
+ while current and getid(current) == glyph_code do
+ local newroot = root[getchar(current)]
+ if not newroot then
+ return lastrun, lastfinal
+ else
+ local final = newroot.final
+ if final then
+ if trace_detail then
+ report_replacement("hitting word %a, replacement %a",final.word,final.replacement)
+ end
+ lastrun = current
+ lastfinal = final
+ else
+ root = newroot
+ end
+ end
+ current = getnext(current)
+ end
+ if lastrun then
+ return lastrun, lastfinal
+ end
+ end
+ end
+end
+
+local function tonodes(list,template)
+ local head, current
+ for i=1,#list do
+ local new = copy_node(template)
+ setfield(new,"char",list[i])
+ if head then
+ head, current = insert_after(head,current,new)
+ else
+ head, current = new, new
+ end
+ end
+ return head
+end
+
+
+function replacements.handler(head)
+ head = tonut(head)
+ local current = head
+ local done = false
+ while current do
+ if getid(current) == glyph_code then
+ local a = getattr(current,a_replacements)
+ if a then
+ local last, final = hit(a,current)
+ if last then
+ local oldlength = final.oldlength
+ local newcodes = final.newcodes
+ local newlength = #newcodes
+ if trace_replacements then
+ report_replacement("replacing word %a by %a",final.word,final.replacement)
+ end
+ if final.special then
+ -- easier is to delete and insert (a simple callout to tex would be more efficient)
+ -- maybe just walk over a replacement string instead
+ local prev = getprev(current)
+ local next = getnext(last)
+ local list = current
+ setfield(last,"next",nil)
+ setfield(prev,"next",next)
+ if next then
+ setfield(next,"prev",prev)
+ end
+ current = prev
+ if not current then
+ head = nil
+ end
+ for i=1,newlength do
+ local codes = newcodes[i]
+ local new = nil
+ if type(codes) == "table" then
+ local method = codes[1]
+ if method == "discretionary" then
+ local pre, post, replace = codes[2], codes[3], codes[4]
+ new = new_disc()
+ if pre then
+ setfield(new,"pre",tonodes(pre,last))
+ end
+ if post then
+ setfield(new,"post",tonodes(post,last))
+ end
+ if replace then
+ setfield(new,"replace",tonodes(replace,last))
+ end
+ else
+ -- todo
+ end
+ else
+ new = copy_node(last)
+ setfield(new,"char",codes)
+ end
+ if new then
+ head, current = insert_after(head,current,new)
+ end
+ end
+ flush_list(list)
+ elseif oldlength == newlength then -- #old == #new
+ for i=1,newlength do
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
+ end
+ elseif oldlength < newlength then -- #old < #new
+ for i=1,newlength-oldlength do
+ local n = copy_node(current)
+ setfield(n,"char",newcodes[i])
+ head, current = insert_node_before(head,current,n)
+ current = getnext(current)
+ end
+ for i=newlength-oldlength+1,newlength do
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
+ end
+ else -- #old > #new
+ for i=1,oldlength-newlength do
+ head, current = remove_node(head,current,true)
+ end
+ for i=1,newlength do
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
+ end
+ end
+ done = true
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ return tonode(head), done
+end
+
+local enabled = false
+
+function replacements.set(n) -- number or 'reset'
+ if n == v_reset then
+ n = unsetvalue
+ else
+ n = lists[n].attribute
+ if not enabled then
+ nodes.tasks.enableaction("processors","languages.replacements.handler")
+ if trace_replacements then
+ report_replacement("enabling replacement handler")
+ end
+ enabled = true
+ end
+ end
+ texsetattribute(a_replacements,n)
+end
+
+-- interface
+
+commands.setreplacements = replacements.set
+commands.addreplacements = replacements.add
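To make the mechanism above easier to follow outside a node list, here is a minimal standalone sketch of the same idea: words are split into codepoints and stored as a nested table, and the longest match wins. The function names and sample words are made up for illustration and are not part of the ConTeXt api; only utf.split and utf.byte from the libraries patched earlier are assumed.

local utfsplit, utfbyte = utf.split, utf.byte

local root = { }

local function add(word,replacement)
    local codes = utfsplit(word,true)
    local r = root
    for i=1,#codes do
        local c = utfbyte(codes[i])
        local n = r[c]
        if not n then
            n = { }
            r[c] = n
        end
        if i == #codes then
            n.final = replacement -- a full word ends here
        end
        r = n
    end
end

local function longestmatch(codes,start) -- codes: list of codepoints
    local r, found, last = root, nil, nil
    for i=start,#codes do
        r = r[codes[i]]
        if not r then
            break
        elseif r.final then
            found, last = r.final, i -- remember the hit, keep looking for a longer one
        end
    end
    return found, last
end

add("new","cool")
add("newer","cooler")

local codes = { }
for i, c in ipairs(utfsplit("newer",true)) do
    codes[i] = utfbyte(c)
end
print(longestmatch(codes,1)) -- cooler  5 : the longer word wins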
diff --git a/Master/texmf-dist/tex/context/base/lang-rep.mkiv b/Master/texmf-dist/tex/context/base/lang-rep.mkiv
new file mode 100644
index 00000000000..b3f21f22a4f
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/lang-rep.mkiv
@@ -0,0 +1,75 @@
+%D \module
+%D [ file=lang-rep,
+%D version=2013.04.28,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Substitution,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D As I needed an example of messing with nodes for the bacho\TEX\ tutorial
+%D I cooked up this. In the end I decided to stick to a simpler example and
+%D just finished this off in case someone really needs it.
+
+\writestatus{loading}{ConTeXt Language Macros / Replacements}
+
+\unprotect
+
+\registerctxluafile{lang-rep}{1.001}
+
+\definesystemattribute[replacements][public]
+
+%D \startluacode
+%D
+%D -- todo: other nodes (prelude to more experiments with auto dropped caps)
+%D
+%D languages.replacements.add("basics", {
+%D ["aap"] = "monkey",
+%D ["noot"] = "nut",
+%D ["never"] = "forever",
+%D ["newer"] = "cooler",
+%D ["new"] = "cool",
+%D -- ["special"] = "veryspe{>>>}{<<<}{=}cial",
+%D })
+%D
+%D \stopluacode
+%D
+%D \replaceword[more][this][that]
+%D \replaceword[more][crap][support]
+%D \replaceword[more][---][—]
+%D \replaceword[basics][special][veryspe{>>>}{<<<}{=}cial]
+%D
+%D \starttyping
+%D \start \setreplacements[basics] What the heck, it's now or never, isn't it new? \par \stop
+%D \start \setreplacements[more] Do we --- {\it really} --- need this kind of crap? \par \stop
+%D \start \setreplacements[basics] All kinds of special thingies! \par \stop
+%D \start \setreplacements[basics] \hsize1mm special \par \stop
+%D \stoptyping
+
+\unexpanded\def\setreplacements[#1]%
+ {\ctxcommand{setreplacements("#1")}}
+
+\unexpanded\def\resetreplacements
+ {\attribute\replacementsattribute\attributeunsetvalue}
+
+\unexpanded\def\replaceword
+ {\dotripleargument\languages_replacements_replace}
+
+\unexpanded\def\languages_replacements_replace[#1][#2][#3]%
+ {\ifthirdargument
+ \ctxcommand{addreplacements("#1",\!!bs#2\!!es,\!!bs#3\!!es)}%
+ \fi}
+
+\appendtoks
+ \resetreplacements
+\to \everyresettypesetting
+
+\appendtoks
+ \resetreplacements
+\to \everyinitializeverbatim
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/lang-url.lua b/Master/texmf-dist/tex/context/base/lang-url.lua
index 35381e672c3..4ed5cdea10d 100644
--- a/Master/texmf-dist/tex/context/base/lang-url.lua
+++ b/Master/texmf-dist/tex/context/base/lang-url.lua
@@ -8,11 +8,11 @@ if not modules then modules = { } end modules ['lang-url'] = {
local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char
-context = context
-
commands = commands or { }
local commands = commands
+context = context
+
--[[
Hyphenating url's is somewhat tricky and a matter of taste. I did
consider using a dedicated hyphenation pattern or dealing with it by node
diff --git a/Master/texmf-dist/tex/context/base/lang-wrd.lua b/Master/texmf-dist/tex/context/base/lang-wrd.lua
index 06a2311a63e..5fc23757ef9 100644
--- a/Master/texmf-dist/tex/context/base/lang-wrd.lua
+++ b/Master/texmf-dist/tex/context/base/lang-wrd.lua
@@ -26,7 +26,18 @@ words.threshold = 4
local numbers = languages.numbers
local registered = languages.registered
-local traverse_nodes = node.traverse
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+
local wordsdata = words.data
local chardata = characters.data
local tasks = nodes.tasks
@@ -96,7 +107,7 @@ end
-- there is an n=1 problem somewhere in nested boxes
local function mark_words(head,whenfound) -- can be optimized and shared
- local current, language, done = head, nil, nil, 0, false
+ local current, language, done = tonut(head), nil, nil, 0, false
local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
local function action()
if s > 0 then
@@ -112,9 +123,9 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n, s = 0, 0
end
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local a = current.lang
+ local a = getfield(current,"lang")
if a then
if a ~= language then
if s > 0 then
@@ -126,16 +137,16 @@ local function mark_words(head,whenfound) -- can be optimized and shared
action()
language = a
end
- local components = current.components
+ local components = getfield(current,"components")
if components then
n = n + 1
nds[n] = current
for g in traverse_nodes(components) do
s = s + 1
- str[s] = utfchar(g.char)
+ str[s] = utfchar(getchar(g))
end
else
- local code = current.char
+ local code = getchar(current)
local data = chardata[code]
if is_letter[data.category] then
n = n + 1
@@ -151,12 +162,12 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n = n + 1
nds[n] = current
end
- elseif id == kern_code and current.subtype == kerning_code and s > 0 then
+ elseif id == kern_code and getsubtype(current) == kerning_code and s > 0 then
-- ok
elseif s > 0 then
action()
end
- current = current.next
+ current = getnext(current)
end
if s > 0 then
action()
@@ -176,6 +187,8 @@ local enabled = false
function words.check(head)
if enabled then
return methods[wordmethod](head)
+ elseif not head then
+ return head, false
else
return head, false
end
@@ -207,7 +220,7 @@ table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
else
c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
end
- local v = c and function(n) n[a_color] = c end or false
+ local v = c and function(n) setattr(n,a_color,c) end or false
t[k] = v
return v
end)
@@ -226,7 +239,7 @@ end
methods[1] = function(head)
for n in traverse_nodes(head) do
- n[a_color] = unsetvalue -- hm, not that selective (reset color)
+ setattr(n,a_color,unsetvalue) -- hm, not that selective (reset color)
end
return mark_words(head,sweep)
end
@@ -327,24 +340,24 @@ end
methods[3] = function(head)
for n in traverse_nodes(head) do
- n[a_color] = unsetvalue
+ setattr(n,a_color,unsetvalue)
end
return mark_words(head,sweep)
end
-- for the moment we hook it into the attribute handler
---~ languagehacks = { }
+-- languagehacks = { }
---~ function languagehacks.process(namespace,attribute,head)
---~ return languages.check(head)
---~ end
+-- function languagehacks.process(namespace,attribute,head)
+-- return languages.check(head)
+-- end
---~ chars.plugins[chars.plugins+1] = {
---~ name = "language",
---~ namespace = languagehacks,
---~ processor = languagehacks.process
---~ }
+-- chars.plugins[chars.plugins+1] = {
+-- name = "language",
+-- namespace = languagehacks,
+-- processor = languagehacks.process
+-- }
-- interface
diff --git a/Master/texmf-dist/tex/context/base/layo-ini.lua b/Master/texmf-dist/tex/context/base/layo-ini.lua
index 56ced2c0bc8..d35d7ef6965 100644
--- a/Master/texmf-dist/tex/context/base/layo-ini.lua
+++ b/Master/texmf-dist/tex/context/base/layo-ini.lua
@@ -6,17 +6,10 @@ if not modules then modules = { } end modules ['layo-ini'] = {
license = "see context related readme files"
}
--- We need to share information between the TeX and Lua end
--- about the typographical model. This happens here.
---
--- Code might move.
+-- We need to share information between the TeX and Lua end about the typographical
+-- model. This happens here. This code might move.
--- conditionals.layoutisdoublesided
--- conditionals.layoutissinglesided
--- texcount.pagenoshift
--- texcount.realpageno
-
-local texcount = tex.count
+local texgetcount = tex.getcount
local conditionals = tex.conditionals
layouts = {
@@ -33,14 +26,14 @@ function status.leftorrightpagection(left,right)
return left, right
elseif conditionals.layoutissinglesided then
return left, right
- elseif texcount.pagenoshift % 2 == 0 then
- if texcount.realpageno % 2 == 0 then
+ elseif texgetcount("pagenoshift") % 2 == 0 then
+ if texgetcount("realpageno") % 2 == 0 then
return right, left
else
return left, right
end
else
- if texcount.realpageno % 2 == 0 then
+ if texgetcount("realpageno") % 2 == 0 then
return left, right
else
return right, left
@@ -53,9 +46,9 @@ function status.isleftpage()
return false
elseif conditionals.layoutissinglesided then
return false
- elseif texcount.pagenoshift % 2 == 0 then
- return texcount.realpageno % 2 == 0
+ elseif texgetcount("pagenoshift") % 2 == 0 then
+ return texgetcount("realpageno") % 2 == 0
else
- return not texcount.realpageno % 2 == 0
+        return not (texgetcount("realpageno") % 2 == 0)
end
end
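The double sided branch boils down to a parity rule: with an even pagenoshift even real page numbers are left pages, and an odd shift flips the parity. A plain Lua restatement (the numbers stand in for the TeX counters):

local function isleftpage(pagenoshift,realpageno)
    if pagenoshift % 2 == 0 then
        return realpageno % 2 == 0
    else
        return realpageno % 2 ~= 0
    end
end

print(isleftpage(0,2)) -- true : even shift, even real page number
print(isleftpage(1,2)) -- false: the odd shift flips the parity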
diff --git a/Master/texmf-dist/tex/context/base/lpdf-ano.lua b/Master/texmf-dist/tex/context/base/lpdf-ano.lua
index adfea3812da..827c43ec609 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-ano.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-ano.lua
@@ -10,47 +10,68 @@ if not modules then modules = { } end modules ['lpdf-ano'] = {
-- todo: /AA << WC << ... >> >> : WillClose actions etc
-local next, tostring = next, tostring
-local rep, format = string.rep, string.format
-local texcount = tex.count
+-- internal references are indicated by a number (and turned into a "#<number>" destination when needed)
+-- we only flush internal destinations that are actually referred to
+
+local next, tostring, tonumber, rawget = next, tostring, tonumber, rawget
+local rep, format, find = string.rep, string.format, string.find
+local min = math.min
local lpegmatch = lpeg.match
local formatters = string.formatters
local backends, lpdf = backends, lpdf
-local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
-local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
+local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+
+local log_destinations = false directives.register("destinations.log", function(v) log_destinations = v end)
-local report_reference = logs.reporter("backend","references")
-local report_destination = logs.reporter("backend","destinations")
-local report_bookmark = logs.reporter("backend","bookmarks")
+local report_reference = logs.reporter("backend","references")
+local report_destination = logs.reporter("backend","destinations")
+local report_bookmark = logs.reporter("backend","bookmarks")
local variables = interfaces.variables
-local constants = interfaces.constants
+local v_auto = variables.auto
+local v_page = variables.page
+
+local factor = number.dimenfactors.bp
local settings_to_array = utilities.parsers.settings_to_array
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
local registrations = backends.pdf.registrations
+local getpos = codeinjections.getpos
+local gethpos = codeinjections.gethpos
+local getvpos = codeinjections.getvpos
+
local javascriptcode = interactions.javascripts.code
local references = structures.references
local bookmarks = structures.bookmarks
+local flaginternals = references.flaginternals
+local usedinternals = references.usedinternals
+local usedviews = references.usedviews
+
local runners = references.runners
local specials = references.specials
local handlers = references.handlers
local executers = references.executers
-local getinnermethod = references.getinnermethod
local nodepool = nodes.pool
-local pdfannotation_node = nodepool.pdfannotation
-local pdfdestination_node = nodepool.pdfdestination
-local latelua_node = nodepool.latelua
+----- pdfannotation_node = nodepool.pdfannotation
+----- pdfdestination_node = nodepool.pdfdestination
+----- latelua_node = nodepool.latelua
+local latelua_function_node = nodepool.lateluafunction -- still node ... todo
+
+local texgetcount = tex.getcount
local pdfdictionary = lpdf.dictionary
local pdfarray = lpdf.array
@@ -62,7 +83,12 @@ local pdfshareobjectreference = lpdf.shareobjectreference
local pdfreserveobject = lpdf.reserveobject
local pdfpagereference = lpdf.pagereference
local pdfdelayedobject = lpdf.delayedobject
-local pdfregisterannotation = lpdf.registerannotation
+local pdfregisterannotation = lpdf.registerannotation -- forward definition (for the moment)
+local pdfnull = lpdf.null
+local pdfaddtocatalog = lpdf.addtocatalog
+local pdfaddtonames = lpdf.addtonames
+local pdfaddtopageattributes = lpdf.addtopageattributes
+local pdfrectangle = lpdf.rectangle
-- todo: 3dview
@@ -78,102 +104,417 @@ local pdf_t = pdfconstant("T")
local pdf_fit = pdfconstant("Fit")
local pdf_named = pdfconstant("Named")
-local pdf_border = pdfarray { 0, 0, 0 }
+local autoprefix = "#"
-local cache = { }
+-- Bah, I hate this kind of feature .. anyway, as we have delayed resolving we
+-- only support a document-wide setup and it has to be set before the first one
+-- is used. Also, we default to a non-intrusive gray and the outline is kept
+-- thin without dashing lines. This is as far as I'm prepared to go. This way
+-- it can also be used as a debug feature.
-local function pagedestination(n) -- only cache fit
- if n > 0 then
- local pd = cache[n]
- if not pd then
- local a = pdfarray {
- pdfreference(pdfpagereference(n)),
- pdf_fit,
- }
- pd = pdfshareobjectreference(a)
- cache[n] = pd
+local pdf_border_style = pdfarray { 0, 0, 0 } -- radius radius linewidth
+local pdf_border_color = nil
+local set_border = false
+
+local function pdfborder()
+    set_border = true
+ return pdf_border_style, pdf_border_color
+end
+
+lpdf.border = pdfborder
+
+directives.register("references.border",function(v)
+ if v and not set_border then
+ if type(v) == "string" then
+ local m = attributes.list[attributes.private('color')] or { }
+ local c = m and m[v]
+ local v = c and attributes.colors.value(c)
+ if v then
+ local r, g, b = v[3], v[4], v[5]
+ -- if r == g and g == b then
+ -- pdf_border_color = pdfarray { r } -- reduced, not not ... bugged viewers
+ -- else
+ pdf_border_color = pdfarray { r, g, b } -- always rgb
+ -- end
+ end
+ end
+ if not pdf_border_color then
+ pdf_border_color = pdfarray { .6, .6, .6 } -- no reduce to { 0.6 } as there are buggy viewers out there
end
- return pd
+        pdf_border_style = pdfarray { 0, 0, .5 } -- < 0.5 is not shown by acrobat (at least not in my version)
end
-end
+end)
+
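For illustration only, here is roughly what the directive amounts to once a color name has been resolved: a thin solid outline plus an rgb color array, which end up as the /Border and /C entries of link annotations. The palette below is a made-up stand-in for the real color attribute lookup:

local palette = { darkred = { 0.5, 0, 0 } } -- assumed sample palette, not the ConTeXt color list

local function borderfor(name)
    local rgb = palette[name] or { 0.6, 0.6, 0.6 } -- fall back to the default gray
    return { 0, 0, 0.5 }, rgb                      -- /Border: radius radius linewidth, /C: rgb
end

local style, color = borderfor("darkred")
-- a link annotation then carries: /Border [ 0 0 0.5 ] /C [ 0.5 0 0 ]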
+-- the used and flag code here is somewhat messy in the sense
+-- that it belongs in strc-ref but at the same time depends on
+-- the backend so we keep it here
+
+-- the caching is somewhat memory intensive on the one hand but
+-- it saves many small temporary tables so it might pay off
+
+local pagedestinations = allocate()
+local pagereferences = allocate() -- annots are cached themselves
+
+setmetatableindex(pagedestinations, function(t,k)
+ k = tonumber(k)
+ local v = rawget(t,k)
+ if v then
+ -- report_reference("page number expected, got %s: %a",type(k),k)
+ return v
+ end
+ local v = k > 0 and pdfarray {
+ pdfreference(pdfpagereference(k)),
+ pdf_fit,
+ } or pdfnull()
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(pagereferences,function(t,k)
+ k = tonumber(k)
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ local v = pdfdictionary { -- can be cached
+ S = pdf_goto,
+ D = pagedestinations[k],
+ }
+ t[k] = v
+ return v
+end)
-lpdf.pagedestination = pagedestination
+lpdf.pagereferences = pagereferences -- table
+lpdf.pagedestinations = pagedestinations -- table
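Both tables are filled lazily: the first request for a page builds the /Fit destination (respectively the /GoTo wrapper) and caches it, so every later link to that page reuses the same object. A standalone sketch with a fake page reference helper:

local function pageref(n) return n .. " 0 R" end -- stand-in for pdfpagereference

local pagedestinations = setmetatable({ }, { __index = function(t,k)
    local v = k > 0 and ("[ " .. pageref(k) .. " /Fit ]") or "null"
    t[k] = v
    return v
end })

print(pagedestinations[3]) -- [ 3 0 R /Fit ]  (built once, then reused)
-- pagereferences[3] wraps that as << /S /GoTo /D [ 3 0 R /Fit ] >>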
local defaultdestination = pdfarray { 0, pdf_fit }
-local function link(url,filename,destination,page,actions)
- if filename and filename ~= "" then
- if file.basename(filename) == tex.jobname then
- return false
- else
- filename = file.addsuffix(filename,"pdf")
+-- fit is default (see lpdf-nod)
+
+local destinations = { } -- to be used soon
+
+local function pdfregisterdestination(name,reference)
+ local d = destinations[name]
+ if d then
+ report_destination("ignoring duplicate destination %a with reference %a",name,reference)
+ else
+ destinations[name] = reference
+ end
+end
+
+lpdf.registerdestination = pdfregisterdestination
+
+local maxslice = 32 -- could be made configurable ... 64 is also ok
+
+luatex.registerstopactions(function()
+ if log_destinations and next(destinations) then
+ local logsnewline = logs.newline
+ local log_destinations = logs.reporter("system","references")
+ local log_destination = logs.reporter("destination")
+ logs.pushtarget("logfile")
+ logsnewline()
+ log_destinations("start used destinations")
+ logsnewline()
+ local n = 0
+ for destination, pagenumber in table.sortedhash(destinations) do
+ log_destination("% 4i : %-5s : %s",pagenumber,usedviews[destination] or defaultview,destination)
+ n = n + 1
+ end
+ logsnewline()
+ log_destinations("stop used destinations")
+ logsnewline()
+ logs.poptarget()
+ report_destination("%s destinations saved in log file",n)
+ end
+end)
+
+
+local function pdfnametree(destinations)
+ local slices = { }
+ local sorted = table.sortedkeys(destinations)
+ local size = #sorted
+
+ if size <= 1.5*maxslice then
+ maxslice = size
+ end
+
+ for i=1,size,maxslice do
+ local amount = min(i+maxslice-1,size)
+ local names = pdfarray { }
+ for j=i,amount do
+ local destination = sorted[j]
+ local pagenumber = destinations[destination]
+ names[#names+1] = destination
+ names[#names+1] = pdfreference(pagenumber)
end
+ local first = sorted[i]
+ local last = sorted[amount]
+ local limits = pdfarray {
+ first,
+ last,
+ }
+ local d = pdfdictionary {
+ Names = names,
+ Limits = limits,
+ }
+ slices[#slices+1] = {
+ reference = pdfreference(pdfflushobject(d)),
+ limits = limits,
+ }
end
- if url and url ~= "" then
- if filename and filename ~= "" then
- if destination and destination ~= "" then
- url = file.join(url,filename).."#"..destination
+ local function collectkids(slices,first,last)
+ local k = pdfarray()
+ local d = pdfdictionary {
+ Kids = k,
+ Limits = pdfarray {
+ slices[first].limits[1],
+ slices[last ].limits[2],
+ },
+ }
+ for i=first,last do
+ k[#k+1] = slices[i].reference
+ end
+ return d
+ end
+ if #slices == 1 then
+ return slices[1].reference
+ else
+ while true do
+ if #slices > maxslice then
+ local temp = { }
+ local size = #slices
+ for i=1,size,maxslice do
+ local kids = collectkids(slices,i,min(i+maxslice-1,size))
+ temp[#temp+1] = {
+ reference = pdfreference(pdfflushobject(kids)),
+ limits = kids.Limits,
+ }
+ end
+ slices = temp
else
- url = file.join(url,filename)
+ return pdfreference(pdfflushobject(collectkids(slices,1,#slices)))
end
end
- return pdfdictionary {
- S = pdf_uri,
- URI = url,
- }
- elseif filename and filename ~= "" then
- -- no page ?
- if destination == "" then
+ end
+end
+
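The slicing in pdfnametree is easiest to see with numbers: 100 sorted destination names and a maxslice of 32 give four leaf nodes, each carrying /Names and /Limits, and since four kids fit in one node a single /Kids root suffices. A quick check of the leaf boundaries:

local size, maxslice = 100, 32 -- made-up amount of destinations
local leaves = { }
for i=1,size,maxslice do
    leaves[#leaves+1] = { first = i, last = math.min(i+maxslice-1,size) }
end
for _, leaf in ipairs(leaves) do
    print(leaf.first, leaf.last) -- 1 32, 33 64, 65 96, 97 100
end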
+local function pdfdestinationspecification()
+ if next(destinations) then -- safeguard
+ local r = pdfnametree(destinations)
+ -- pdfaddtocatalog("Dests",r)
+ pdfaddtonames("Dests",r)
+ if not log_destinations then
+ destinations = nil
+ end
+ end
+end
+
+lpdf.nametree = pdfnametree
+lpdf.destinationspecification = pdfdestinationspecification
+
+lpdf.registerdocumentfinalizer(pdfdestinationspecification,"collect destinations")
+
+-- todo
+
+local destinations = { }
+
+local f_xyz = formatters["<< /D [ %i 0 R /XYZ %0.3F %0.3F null ] >>"]
+local f_fit = formatters["<< /D [ %i 0 R /Fit ] >>"]
+local f_fitb = formatters["<< /D [ %i 0 R /FitB ] >>"]
+local f_fith = formatters["<< /D [ %i 0 R /FitH %0.3F ] >>"]
+local f_fitv = formatters["<< /D [ %i 0 R /FitV %0.3F ] >>"]
+local f_fitbh = formatters["<< /D [ %i 0 R /FitBH %0.3F ] >>"]
+local f_fitbv = formatters["<< /D [ %i 0 R /FitBV %0.3F ] >>"]
+local f_fitr = formatters["<< /D [ %i 0 R /FitR [ %0.3F %0.3F %0.3F %0.3F ] ] >>"]
+
+local v_standard = variables.standard
+local v_frame = variables.frame
+local v_width = variables.width
+local v_minwidth = variables.minwidth
+local v_height = variables.height
+local v_minheight = variables.minheight
+local v_fit = variables.fit
+local v_tight = variables.tight
+
+-- nicer is to create dictionaries and set properties but it's a bit overkill
+
+local destinationactions = {
+ [v_standard] = function(r,w,h,d) return f_xyz (r,pdfrectangle(w,h,d)) end, -- local left,top with zoom (0 in our case)
+ [v_frame] = function(r,w,h,d) return f_fitr (r,pdfrectangle(w,h,d)) end, -- fit rectangle in window
+ [v_width] = function(r,w,h,d) return f_fith (r, gethpos() *factor) end, -- top coordinate, fit width of page in window
+ [v_minwidth] = function(r,w,h,d) return f_fitbh(r, gethpos() *factor) end, -- top coordinate, fit width of content in window
+ [v_height] = function(r,w,h,d) return f_fitv (r,(getvpos()+h)*factor) end, -- left coordinate, fit height of page in window
+ [v_minheight] = function(r,w,h,d) return f_fitbv(r,(getvpos()+h)*factor) end, -- left coordinate, fit height of content in window
+ [v_fit] = f_fit, -- fit page in window
+ [v_tight] = f_fitb, -- fit content in window
+}
+
+local mapping = {
+ [v_standard] = v_standard, xyz = v_standard,
+ [v_frame] = v_frame, fitr = v_frame,
+ [v_width] = v_width, fith = v_width,
+ [v_minwidth] = v_minwidth, fitbh = v_minwidth,
+ [v_height] = v_height, fitv = v_height,
+ [v_minheight] = v_minheight, fitbv = v_minheight,
+ [v_fit] = v_fit, fit = v_fit,
+ [v_tight] = v_tight, fitb = v_tight,
+}
+
+local defaultview = v_fit
+local defaultaction = destinationactions[defaultview]
+
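As a concrete example of what these formatters emit (plain string.format is used here instead of ConTeXt's %F directive), a "width" view on page object 12 with a top coordinate of 700.5bp becomes a /FitH destination:

local f_fith = "<< /D [ %i 0 R /FitH %0.3f ] >>" -- mirrors f_fith above, %f instead of %F
print(string.format(f_fith,12,700.5))
-- << /D [ 12 0 R /FitH 700.500 ] >>
-- the "fit" and "tight" views need no coordinate at all: << /D [ 12 0 R /Fit ] >>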
+-- A complication is that we need to use named destinations when we have views, so we
+-- end up with a mix. A previous version just output multiple destinations, but now
+-- that we have moved it all to here we can be more sparse.
+
+local pagedestinations = { }
+
+table.setmetatableindex(pagedestinations,function(t,k)
+ local v = pdfdelayedobject(f_fit(k))
+ t[k] = v
+ return v
+end)
+
+local function flushdestination(width,height,depth,names,view)
+ local r = pdfpagereference(texgetcount("realpageno"))
+ if view == defaultview then
+ r = pagedestinations[r]
+ else
+ local action = view and destinationactions[view] or defaultaction
+ r = pdfdelayedobject(action(r,width,height,depth))
+ end
+ for n=1,#names do
+ local name = names[n]
+ if name then
+ pdfregisterdestination(name,r)
+ end
+ end
+end
+
+function nodeinjections.destination(width,height,depth,names,view)
+ -- todo check if begin end node / was comment
+ view = view and mapping[view] or defaultview
+ if trace_destinations then
+ report_destination("width %p, height %p, depth %p, names %|t, view %a",width,height,depth,names,view)
+ end
+ local method = references.innermethod
+ local noview = view == defaultview
+ local doview = false
+ -- we could save some aut's by using a name when given but it doesn't pay off apart
+ -- from making the code messy and tracing hard .. we only save some destinations
+ -- which we already share anyway
+ for n=1,#names do
+ local name = names[n]
+ if usedviews[name] then
+ -- already done, maybe a warning
+ elseif type(name) == "number" then
+ if noview then
+ usedviews[name] = view
+ names[n] = false
+ elseif method == v_page then
+ usedviews[name] = view
+ names[n] = false
+ else
+ local used = usedinternals[name]
+ if used and used ~= defaultview then
+ usedviews[name] = view
+ names[n] = autoprefix .. name
+ doview = true
+ end
+ end
+ elseif method == v_page then
+ usedviews[name] = view
+ else
+ usedviews[name] = view
+ doview = true
+ end
+ end
+ if doview then
+ return latelua_function_node(function() flushdestination(width,height,depth,names,view) end)
+ end
+end
+
+-- we could share dictionaries ... todo
+
+local function somedestination(destination,internal,page) -- no view anyway
+ if references.innermethod ~= v_page then
+ if type(destination) == "number" then
+ if not internal then
+ internal = destination
+ end
destination = nil
end
- if not destination and page then
- destination = pdfarray { page - 1, pdf_fit }
+ if internal then
+ flaginternals[internal] = true -- for bookmarks and so
+ local used = usedinternals[internal]
+ if used == defaultview or used == true then
+ return pagereferences[page]
+ end
+ if type(destination) ~= "string" then
+ destination = autoprefix .. internal
+ end
+ return pdfdictionary {
+ S = pdf_goto,
+ D = destination,
+ }
end
- return pdfdictionary {
- S = pdf_gotor, -- can also be pdf_launch
- F = filename,
- D = destination or defaultdestination, -- D is mandate
- NewWindow = (actions.newwindow and true) or nil,
- }
- elseif destination and destination ~= "" then
- return pdfdictionary { -- can be cached
- S = pdf_goto,
- D = destination,
- }
- else
- local p = tonumber(page)
- if p and p > 0 then
- return pdfdictionary { -- can be cached
+ if destination then
+ -- hopefully this one is flushed
+ return pdfdictionary {
S = pdf_goto,
- D = pdfarray {
- pdfreference(pdfpagereference(p)),
- pdf_fit,
- }
+ D = destination,
}
- elseif trace_references then
- report_reference("invalid page reference %a",page)
end
end
- return false
+ return pagereferences[page]
end
-lpdf.link = link
+-- annotations
-function lpdf.launch(program,parameters)
- if program and program ~= "" then
- local d = pdfdictionary {
- S = pdf_launch,
- F = program,
- D = ".",
- }
- if parameters and parameters ~= "" then
- d.P = parameters
- end
- return d
+local pdflink = somedestination
+
+local function pdffilelink(filename,destination,page,actions)
+ if not filename or filename == "" or file.basename(filename) == tex.jobname then
+ return false
+ end
+ filename = file.addsuffix(filename,"pdf")
+ if not destination or destination == "" then
+ destination = pdfarray { (page or 0) - 1, pdf_fit }
end
+ return pdfdictionary {
+ S = pdf_gotor, -- can also be pdf_launch
+ F = filename,
+        D = destination or defaultdestination, -- D is mandatory
+ NewWindow = actions.newwindow and true or nil,
+ }
end
-function lpdf.javascript(name,arguments)
+local function pdfurllink(url,destination,page)
+ if not url or url == "" then
+ return false
+ end
+ if destination and destination ~= "" then
+ url = url .. "#" .. destination
+ end
+ return pdfdictionary {
+ S = pdf_uri,
+ URI = url,
+ }
+end
+
+local function pdflaunch(program,parameters)
+ if not program or program == "" then
+ return false
+ end
+ return pdfdictionary {
+ S = pdf_launch,
+ F = program,
+ D = ".",
+ P = parameters ~= "" and parameters or nil
+ }
+end
+
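Illustrative only: the rough serialized shapes the three helpers end up producing (filenames and urls are made up). pdffilelink falls back to "first page, fit" when no destination is given, and pdfurllink simply appends "#destination" to the url:

local samples = { -- made-up examples of the serialized dictionaries
    pdffilelink = "<< /S /GoToR /F (manual.pdf) /D [ 0 /Fit ] >>",
    pdfurllink  = "<< /S /URI /URI (http://example.org#intro) >>",
    pdflaunch   = "<< /S /Launch /F (xdg-open) /D (.) >>",
}
for name, action in pairs(samples) do
    print(name, action)
end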
+local function pdfjavascript(name,arguments)
local script = javascriptcode(name,arguments) -- make into object (hash)
if script then
return pdfdictionary {
@@ -218,9 +559,11 @@ function codeinjections.prerollreference(actions) -- share can become option
if actions then
local main, n = pdfaction(actions)
if main then
- main = pdfdictionary {
+ local bs, bc = pdfborder()
+ main = pdfdictionary {
Subtype = pdf_link,
- Border = pdf_border,
+ Border = bs,
+ C = bc,
H = (not actions.highlight and pdf_n) or nil,
A = pdfshareobjectreference(main),
F = 4, -- print (mandate in pdf/a)
@@ -230,131 +573,146 @@ function codeinjections.prerollreference(actions) -- share can become option
end
end
-local function use_normal_annotations()
-
- local function reference(width,height,depth,prerolled) -- keep this one
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- return pdfannotation_node(width,height,depth,prerolled)
- end
- end
-
- local function finishreference()
- end
-
- return reference, finishreference
-
-end
+-- local function use_normal_annotations()
+--
+-- local function reference(width,height,depth,prerolled) -- keep this one
+-- if prerolled then
+-- if trace_references then
+-- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+-- end
+-- return pdfannotation_node(width,height,depth,prerolled)
+-- end
+-- end
+--
+-- local function finishreference()
+-- end
+--
+-- return reference, finishreference
+--
+-- end
-- eventually we can do this for special refs only
-local hashed, nofunique, nofused = { }, 0, 0
-
-local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"]
-local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"]
+local hashed = { }
+local nofunique = 0
+local nofused = 0
+local nofspecial = 0
+local share = true
-local function use_shared_annotations()
+local f_annot = formatters["<< /Type /Annot %s /Rect [ %0.3F %0.3F %0.3F %0.3F ] >>"]
- local factor = number.dimenfactors.bp
+directives.register("refences.sharelinks", function(v) share = v end)
- local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
- local h, v = pdf.h, pdf.v
- local llx, lly = h*factor, (v - depth)*factor
- local urx, ury = (h + width)*factor, (v + height)*factor
- local annot = f_annot(prerolled,llx,lly,urx,ury)
- local n = hashed[annot]
- if not n then
- n = pdfdelayedobject(annot)
- hashed[annot] = n
- nofunique = nofunique + 1
- end
- nofused = nofused + 1
- pdfregisterannotation(n)
+table.setmetatableindex(hashed,function(t,k)
+ local v = pdfdelayedobject(k)
+ if share then
+ t[k] = v
end
+ nofunique = nofunique + 1
+ return v
+end)
+
+local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
+ local annot = hashed[f_annot(prerolled,pdfrectangle(width,height,depth))]
+ nofused = nofused + 1
+ return pdfregisterannotation(annot)
+end
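The sharing is plain memoization: identical annotation strings (same prerolled action and the same rectangle) map onto one delayed object, so repeated links cost a single object plus one registration per occurrence. A standalone sketch with a fake object allocator:

local nofunique, nofused = 0, 0

local function delayedobject(s) -- stand-in for pdfdelayedobject
    nofunique = nofunique + 1
    return nofunique            -- pretend object number
end

local hashed = setmetatable({ }, { __index = function(t,k)
    local v = delayedobject(k)
    t[k] = v -- memoize; skipping this is what sharelinks=false amounts to
    return v
end })

local a = hashed["<< /Type /Annot /Rect [ 0 0 10 10 ] >>"] nofused = nofused + 1
local b = hashed["<< /Type /Annot /Rect [ 0 0 10 10 ] >>"] nofused = nofused + 1
print(a == b, nofunique, nofused) -- true  1  2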
- _bpnf_ = finishreference
-
- local function reference(width,height,depth,prerolled)
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- local luacode = f_bpnf(width,height,depth,prerolled)
- return latelua_node(luacode)
- end
+local function finishannotation(width,height,depth,prerolled,r)
+ local annot = f_annot(prerolled,pdfrectangle(width,height,depth))
+ if r then
+ pdfdelayedobject(annot,r)
+ else
+ r = pdfdelayedobject(annot)
end
+ nofspecial = nofspecial + 1
+ return pdfregisterannotation(r)
+end
- statistics.register("pdf annotations", function()
- if nofused > 0 then
- return format("%s embedded, %s unique",nofused,nofunique)
- else
- return nil
+function nodeinjections.reference(width,height,depth,prerolled)
+ if prerolled then
+ if trace_references then
+ report_reference("link: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
- end)
-
-
- return reference, finishreference
-
+ return latelua_function_node(function() finishreference(width,height,depth,prerolled) end)
+ end
end
-local lln = latelua_node() if node.has_field(lln,'string') then
-
- directives.register("refences.sharelinks", function(v)
- if v then
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
- else
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+function nodeinjections.annotation(width,height,depth,prerolled,r)
+ if prerolled then
+ if trace_references then
+ report_reference("special: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
- end)
+ return latelua_function_node(function() finishannotation(width,height,depth,prerolled,r or false) end)
+ end
+end
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
+-- beware, we register during a latelua sweep so we have to make sure that
+-- we finalize after that (also in a latelua for the moment as we have no
+-- callback yet)
-else
+local annotations = nil
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+function lpdf.registerannotation(n)
+ if annotations then
+ annotations[#annotations+1] = pdfreference(n)
+ else
+ annotations = pdfarray { pdfreference(n) } -- no need to use lpdf.array cum suis
+ end
+end
-end node.free(lln)
+pdfregisterannotation = lpdf.registerannotation
--- -- -- --
--- -- -- --
+function lpdf.annotationspecification()
+ if annotations then
+ local r = pdfdelayedobject(tostring(annotations)) -- delayed so okay in latelua
+ pdfaddtopageattributes("Annots",pdfreference(r))
+ annotations = nil
+ end
+end
-local done = { } -- prevent messages
+lpdf.registerpagefinalizer(lpdf.annotationspecification,"finalize annotations")
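In other words: annotations registered while a page is shipped out are collected into one array, flushed as that page's /Annots entry, and the collector starts out empty again for the next page. A standalone sketch (object numbers are made up, the real code goes through pdfreference and pdfdelayedobject):

local annotations = nil

local function registerannotation(n)
    local r = n .. " 0 R"
    if annotations then
        annotations[#annotations+1] = r
    else
        annotations = { r }
    end
end

local function flushpage()
    if annotations then
        print("/Annots [ " .. table.concat(annotations," ") .. " ]")
        annotations = nil -- start afresh on the next page
    end
end

registerannotation(12) registerannotation(13)
flushpage() -- /Annots [ 12 0 R 13 0 R ]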
-function nodeinjections.destination(width,height,depth,name,view)
- if not done[name] then
- done[name] = true
- if trace_destinations then
- report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view)
- end
- return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node
+statistics.register("pdf annotations", function()
+ if nofused > 0 or nofspecial > 0 then
+ return format("%s links (%s unique), %s special",nofused,nofunique,nofspecial)
+ else
+ return nil
end
-end
+end)
-- runners and specials
runners["inner"] = function(var,actions)
- if getinnermethod() == "names" then
+ local internal = false
+ local inner = nil
+ if references.innermethod == v_auto then
local vi = var.i
if vi then
local vir = vi.references
if vir then
- local internal = vir.internal
+ -- todo: no need for it when we have a real reference
+ local reference = vir.reference
+ if reference and reference ~= "" then
+ var.inner = reference
+ local prefix = var.p
+ if prefix and prefix ~= "" then
+ var.prefix = prefix
+ inner = prefix .. ":" .. reference
+ else
+ inner = reference
+ end
+ end
+ internal = vir.internal
if internal then
- var.inner = "aut:" .. internal
+ flaginternals[internal] = true
end
end
end
else
var.inner = nil
end
- local prefix = var.p
- local inner = var.inner
- if inner and prefix and prefix ~= "" then
- inner = prefix .. ":" .. inner -- might not always be ok
- end
- return link(nil,nil,inner,var.r,actions)
+ return pdflink(inner,internal,var.r)
end
runners["inner with arguments"] = function(var,actions)
@@ -364,12 +722,15 @@ end
runners["outer"] = function(var,actions)
local file, url = references.checkedfileorurl(var.outer,var.outer)
- return link(url,file,var.arguments,nil,actions)
+ if file then
+ return pdffilelink(file,var.arguments,nil,actions)
+ elseif url then
+ return pdfurllink(url,var.arguments,nil,actions)
+ end
end
runners["outer with inner"] = function(var,actions)
- local file = references.checkedfile(var.outer) -- was var.f but fails ... why
- return link(nil,file,var.inner,var.r,actions)
+ return pdffilelink(references.checkedfile(var.outer),var.inner,var.r,actions)
end
runners["special outer with operation"] = function(var,actions)
@@ -416,12 +777,9 @@ function specials.internal(var,actions) -- better resolve in strc-ref
if not v then
-- error
report_reference("no internal reference %a",i)
- elseif getinnermethod() == "names" then
- -- named
- return link(nil,nil,"aut:"..i,v.references.realpage,actions)
else
- -- page
- return link(nil,nil,nil,v.references.realpage,actions)
+ flaginternals[i] = true
+ return pdflink(nil,i,v.references.realpage)
end
end
@@ -434,8 +792,7 @@ local pages = references.pages
function specials.page(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
local p = var.r
if not p then -- todo: call special from reference code
@@ -445,29 +802,24 @@ function specials.page(var,actions)
else
p = references.realpageofpage(tonumber(p))
end
- -- if p then
- -- var.r = p
- -- end
end
- return link(nil,nil,nil,p or var.operation,actions)
+ return pdflink(nil,nil,p or var.operation)
end
end
function specials.realpage(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
- return link(nil,nil,nil,var.operation,actions)
+ return pdflink(nil,nil,var.operation)
end
end
function specials.userpage(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
local p = var.r
if not p then -- todo: call special from reference code
@@ -479,15 +831,15 @@ function specials.userpage(var,actions)
-- var.r = p
-- end
end
- return link(nil,nil,nil,p or var.operation,actions)
+ return pdflink(nil,nil,p or var.operation)
end
end
function specials.deltapage(var,actions)
local p = tonumber(var.operation)
if p then
- p = references.checkedrealpage(p + texcount.realpageno)
- return link(nil,nil,nil,p,actions)
+ p = references.checkedrealpage(p + texgetcount("realpageno"))
+ return pdflink(nil,nil,p)
end
end
@@ -527,27 +879,29 @@ function specials.order(var,actions) -- references.specials !
end
function specials.url(var,actions)
- local url = references.checkedurl(var.operation)
- return link(url,nil,var.arguments,nil,actions)
+ return pdfurllink(references.checkedurl(var.operation),var.arguments,nil,actions)
end
function specials.file(var,actions)
- local file = references.checkedfile(var.operation)
- return link(nil,file,var.arguments,nil,actions)
+ return pdffilelink(references.checkedfile(var.operation),var.arguments,nil,actions)
end
function specials.fileorurl(var,actions)
local file, url = references.checkedfileorurl(var.operation,var.operation)
- return link(url,file,var.arguments,nil,actions)
+ if file then
+ return pdffilelink(file,var.arguments,nil,actions)
+ elseif url then
+ return pdfurllink(url,var.arguments,nil,actions)
+ end
end
function specials.program(var,content)
local program = references.checkedprogram(var.operation)
- return lpdf.launch(program,var.arguments)
+ return pdflaunch(program,var.arguments)
end
function specials.javascript(var)
- return lpdf.javascript(var.operation,var.arguments)
+ return pdfjavascript(var.operation,var.arguments)
end
specials.JS = specials.javascript
@@ -671,11 +1025,6 @@ function specials.action(var)
end
end
---~ entry.A = pdfdictionary {
---~ S = pdf_goto,
---~ D = ....
---~ }
-
local function build(levels,start,parent,method)
local startlevel = levels[start][1]
local i, n = start, 0
@@ -700,12 +1049,9 @@ local function build(levels,start,parent,method)
Title = pdfunicode(title),
Parent = parent,
Prev = prev and pdfreference(prev),
+ A = somedestination(reference.internal,reference.internal,reference.realpage),
}
- if method == "internal" then
- entry.Dest = "aut:" .. reference.internal
- else -- if method == "page" then
- entry.Dest = pagedestination(reference.realpage)
- end
+ -- entry.Dest = somedestination(reference.internal,reference.internal,reference.realpage)
if not first then first, last = child, child end
prev = child
last = prev
@@ -744,10 +1090,10 @@ function codeinjections.addbookmarks(levels,method)
Count = m,
}
pdfflushobject(parent,dict)
- lpdf.addtocatalog("Outlines",lpdf.reference(parent))
+ pdfaddtocatalog("Outlines",lpdf.reference(parent))
end
end
-- this could also be hooked into the frontend finalizer
-lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks")
+lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks") -- hm, why indirect call
diff --git a/Master/texmf-dist/tex/context/base/lpdf-col.lua b/Master/texmf-dist/tex/context/base/lpdf-col.lua
index b358d082098..9e483f9b51f 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-col.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-col.lua
@@ -14,42 +14,49 @@ local formatters = string.formatters
local backends, lpdf, nodes = backends, lpdf, nodes
-local allocate = utilities.storage.allocate
-local formatters = string.formatters
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-
-local nodepool = nodes.pool
-local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
-
-local pdfconstant = lpdf.constant
-local pdfstring = lpdf.string
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfverbose = lpdf.verbose
-local pdfflushobject = lpdf.flushobject
-local pdfflushstreamobject = lpdf.flushstreamobject
-
-local colors = attributes.colors
-local transparencies = attributes.transparencies
-local registertransparancy = transparencies.register
-local registercolor = colors.register
-local colorsvalue = colors.value
-local transparenciesvalue = transparencies.value
-local forcedmodel = colors.forcedmodel
-
-local c_transparency = pdfconstant("Transparency")
-
-local f_gray = formatters["%.3f g %.3f G"]
-local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
-local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
+local allocate = utilities.storage.allocate
+local formatters = string.formatters
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+
+local nodepool = nodes.pool
+local register = nodepool.register
+local pdfliteral = nodepool.pdfliteral
+
+local pdfconstant = lpdf.constant
+local pdfstring = lpdf.string
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfverbose = lpdf.verbose
+local pdfflushobject = lpdf.flushobject
+local pdfdelayedobject = lpdf.delayedobject
+local pdfflushstreamobject = lpdf.flushstreamobject
+
+local pdfshareobjectreference = lpdf.shareobjectreference
+
+local addtopageattributes = lpdf.addtopageattributes
+local adddocumentcolorspace = lpdf.adddocumentcolorspace
+local adddocumentextgstate = lpdf.adddocumentextgstate
+
+local colors = attributes.colors
+local transparencies = attributes.transparencies
+local registertransparancy = transparencies.register
+local registercolor = colors.register
+local colorsvalue = colors.value
+local transparenciesvalue = transparencies.value
+local forcedmodel = colors.forcedmodel
+
+local c_transparency = pdfconstant("Transparency")
+
+local f_gray = formatters["%.3F g %.3F G"]
+local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
+local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
local f_tr = formatters["Tr%s"]
-local f_cm = formatters["q %f %f %f %f %f %f cm"]
+local f_cm = formatters["q %F %F %F %F %F %F cm"]
local f_effect = formatters["%s Tc %s w %s Tr"]
local f_tr_gs = formatters["/Tr%s gs"]
local f_num_1 = tostring
@@ -76,11 +83,13 @@ lpdf.transparencygroups = transparencygroups
table.setmetatableindex(transparencygroups, function(transparencygroups,colormodel)
local cs = colorspaceconstants[colormodel]
if cs then
- local g = pdfreference(pdfflushobject(pdfdictionary {
+ local d = pdfdictionary {
S = c_transparency,
CS = cs,
I = true,
- }))
+ }
+ -- local g = pdfreference(pdfflushobject(tostring(d)))
+ local g = pdfreference(pdfdelayedobject(tostring(d)))
transparencygroups[colormodel] = g
return g
else
@@ -95,7 +104,7 @@ local function addpagegroup()
if currentgroupcolormodel then
local g = transparencygroups[currentgroupcolormodel]
if g then
- lpdf.addtopageattributes("Group",g)
+ addtopageattributes("Group",g)
end
end
end
@@ -224,7 +233,7 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
local mr = pdfreference(m)
spotcolorhash[name] = m
documentcolorspaces[name] = mr
- lpdf.adddocumentcolorspace(name,mr)
+ adddocumentcolorspace(name,mr)
else
local cnames = pdfarray()
local domain = pdfarray()
@@ -280,13 +289,13 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
cnames,
colorspace,
pdfreference(calculation),
- lpdf.shareobjectreference(tostring(channels)), -- optional but needed for shades
+ pdfshareobjectreference(tostring(channels)), -- optional but needed for shades
}
local m = pdfflushobject(array)
local mr = pdfreference(m)
spotcolorhash[name] = m
documentcolorspaces[name] = mr
- lpdf.adddocumentcolorspace(name,mr)
+ adddocumentcolorspace(name,mr)
end
end
@@ -336,7 +345,7 @@ local function registersomeindexcolor(name,noffractions,names,p,colorspace,range
end
vector = pdfverbose { "<", concat(vector, " "), ">" }
local n = pdfflushobject(pdfarray{ pdf_indexed, a, 255, vector })
- lpdf.adddocumentcolorspace(format("%s_indexed",name),pdfreference(n))
+ adddocumentcolorspace(format("%s_indexed",name),pdfreference(n))
return n
end
@@ -455,7 +464,7 @@ function registrations.transparency(n,a,t)
local mr = pdfreference(m)
transparencyhash[0] = m
documenttransparencies[0] = mr
- lpdf.adddocumentextgstate("Tr0",mr)
+ adddocumentextgstate("Tr0",mr)
done = true
end
if n > 0 and not transparencyhash[n] then
@@ -470,7 +479,7 @@ function registrations.transparency(n,a,t)
local mr = pdfreference(m)
transparencyhash[n] = m
documenttransparencies[n] = mr
- lpdf.adddocumentextgstate(f_tr(n),mr)
+ adddocumentextgstate(f_tr(n),mr)
end
end
@@ -689,7 +698,7 @@ end
-- this will move to lpdf-spe.lua
-local f_slant = formatters["pdf: q 1 0 %f 1 0 0 cm"]
+local f_slant = formatters["pdf: q 1 0 %F 1 0 0 cm"]
backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
diff --git a/Master/texmf-dist/tex/context/base/lpdf-epa.lua b/Master/texmf-dist/tex/context/base/lpdf-epa.lua
index 034e6d7e241..61d57b8d361 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-epa.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-epa.lua
@@ -15,11 +15,12 @@ local formatters = string.formatters
----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
+local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
+local report_link = logs.reporter("backend","merging")
-local report_link = logs.reporter("backend","merging")
-
-local backends, lpdf = backends, lpdf
+local backends = backends
+local lpdf = lpdf
+local context = context
local variables = interfaces.variables
local codeinjections = backends.pdf.codeinjections
diff --git a/Master/texmf-dist/tex/context/base/lpdf-epd.lua b/Master/texmf-dist/tex/context/base/lpdf-epd.lua
index b9f8cfc7ccc..a7399f6b4e2 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-epd.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-epd.lua
@@ -57,7 +57,7 @@ lpdf.epdf = { }
local checked_access
-local function prepare(document,d,t,n,k)
+local function prepare(document,d,t,n,k,mt)
for i=1,n do
local v = d:getVal(i)
local r = d:getValNF(i)
@@ -78,15 +78,16 @@ local function prepare(document,d,t,n,k)
t[d:getKey(i)] = checked_access[v:getTypeName()](v,document)
end
end
- getmetatable(t).__index = nil
+ getmetatable(t).__index = nil -- ?? weird
+    setmetatable(t,mt)
return t[k]
end
-local function some_dictionary(d,document,r)
+local function some_dictionary(d,document,r,mt)
local n = d and d:getLength() or 0
if n > 0 then
local t = { }
- setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k) end } )
+ setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k,mt) end } )
return t
end
end
@@ -153,7 +154,7 @@ local function some_stream(d,document,r)
end
end
--- we need epdf.getBool
+-- we need epdf.boolean(v) in addition to v:getBool() [dictionary, array, stream, real, integer, string, boolean, name, ref, null]
checked_access = {
dictionary = function(d,document,r)
@@ -260,7 +261,8 @@ local function getlayers(document)
end
end
-local function getpages(document)
+
+local function getpages(document,Catalog)
local data = document.data
local xrefs = document.xrefs
local cache = document.cache
@@ -268,9 +270,33 @@ local function getpages(document)
local xref = data:getXRef()
local pages = { }
local nofpages = cata:getNumPages()
+-- local function getpagestuff(pagenumber,k)
+-- if k == "MediaBox" then
+-- local pageobj = cata:getPage(pagenumber)
+-- local pagebox = pageobj:getMediaBox()
+-- return { pagebox.x1, pagebox.y1, pagebox.x2, pagebox.y2 }
+-- elseif k == "CropBox" then
+-- local pageobj = cata:getPage(pagenumber)
+-- local pagebox = pageobj:getMediaBox()
+-- return { pagebox.x1, pagebox.y1, pagebox.x2, pagebox.y2 }
+-- elseif k == "Resources" then
+-- print("todo page resources from parent")
+-- -- local pageobj = cata:getPage(pagenumber)
+-- -- local resources = pageobj:getResources()
+-- end
+-- end
+-- for pagenumber=1,nofpages do
+-- local mt = { __index = function(t,k)
+-- local v = getpagestuff(pagenumber,k)
+-- if v then
+-- t[k] = v
+-- end
+-- return v
+-- end }
+ local mt = { __index = Catalog.Pages }
for pagenumber=1,nofpages do
local pagereference = cata:getPageRef(pagenumber).num
- local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference)
+ local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference,mt)
if pagedata then
pagedata.number = pagenumber
pages[pagenumber] = pagedata
@@ -298,6 +324,9 @@ local function delayed(document,tag,f)
return t
end
+-- local catobj = data:getXRef():fetch(data:getXRef():getRootNum(),data:getXRef():getRootGen())
+-- print(catobj:getDict(),data:getXRef():getCatalog():getDict())
+
local loaded = { }
function lpdf.epdf.load(filename)
@@ -318,7 +347,7 @@ function lpdf.epdf.load(filename)
document.Info = Info
-- document.catalog = Catalog
-- a few handy helper tables
- document.pages = delayed(document,"pages", function() return getpages(document) end)
+ document.pages = delayed(document,"pages", function() return getpages(document,Catalog) end)
document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end)
document.javascripts = delayed(document,"javascripts", function() return getnames(document,Catalog.Names and Catalog.Names.JS) end)
document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end)
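The extra mt argument passed to getpages gives every page dictionary a fallback to the document's /Pages node, which matches the inheritable page attributes (MediaBox, Resources and friends) of the PDF model. A minimal sketch of that fallback:

local pagesnode = { MediaBox = { 0, 0, 595, 842 } }  -- assumed /Pages values
local page      = setmetatable({ Contents = "..." }, -- a single page dictionary
                               { __index = pagesnode })
print(page.MediaBox[3], page.MediaBox[4]) -- 595  842, inherited from /Pages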
diff --git a/Master/texmf-dist/tex/context/base/lpdf-fld.lua b/Master/texmf-dist/tex/context/base/lpdf-fld.lua
index a9b9fd72db2..414562ad58a 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-fld.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-fld.lua
@@ -55,7 +55,8 @@ if not modules then modules = { } end modules ['lpdf-fld'] = {
-- for printing especially when highlighting (those colorfull foregrounds) is
-- on.
-local gmatch, lower, format = string.gmatch, string.lower, string.format
+local tostring, next = tostring, next
+local gmatch, lower, format, formatters = string.gmatch, string.lower, string.format, string.formatters
local lpegmatch = lpeg.match
local utfchar = utf.char
local bpfactor, todimen = number.dimenfactors.bp, string.todimen
@@ -92,14 +93,13 @@ local pdfflushobject = lpdf.flushobject
local pdfshareobjectreference = lpdf.shareobjectreference
local pdfshareobject = lpdf.shareobject
local pdfreserveobject = lpdf.reserveobject
-local pdfreserveannotation = lpdf.reserveannotation
local pdfaction = lpdf.action
-local hpack_node = node.hpack
-
-local nodepool = nodes.pool
+local pdfcolor = lpdf.color
+local pdfcolorvalues = lpdf.colorvalues
+local pdflayerreference = lpdf.layerreference
-local pdfannotation_node = nodepool.pdfannotation
+local hpack_node = node.hpack
local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked
@@ -125,39 +125,39 @@ function codeinjections.setformsmethod(name)
end
local flag = { -- /Ff
- ReadOnly = 1, -- 1
- Required = 2, -- 2
- NoExport = 4, -- 3
- MultiLine = 4096, -- 13
- Password = 8192, -- 14
- NoToggleToOff = 16384, -- 15
- Radio = 32768, -- 16
- PushButton = 65536, -- 17
- PopUp = 131072, -- 18
- Edit = 262144, -- 19
- Sort = 524288, -- 20
- FileSelect = 1048576, -- 21
- DoNotSpellCheck = 4194304, -- 23
- DoNotScroll = 8388608, -- 24
- Comb = 16777216, -- 25
- RichText = 33554432, -- 26
- RadiosInUnison = 33554432, -- 26
- CommitOnSelChange = 67108864, -- 27
+ ReadOnly = 2^ 0, -- 1
+ Required = 2^ 1, -- 2
+ NoExport = 2^ 2, -- 3
+ MultiLine = 2^12, -- 13
+ Password = 2^13, -- 14
+ NoToggleToOff = 2^14, -- 15
+ Radio = 2^15, -- 16
+ PushButton = 2^16, -- 17
+ PopUp = 2^17, -- 18
+ Edit = 2^18, -- 19
+ Sort = 2^19, -- 20
+ FileSelect = 2^20, -- 21
+ DoNotSpellCheck = 2^22, -- 23
+ DoNotScroll = 2^23, -- 24
+ Comb = 2^24, -- 25
+ RichText = 2^25, -- 26
+ RadiosInUnison = 2^25, -- 26
+ CommitOnSelChange = 2^26, -- 27
}
local plus = { -- /F
- Invisible = 1, -- 1
- Hidden = 2, -- 2
- Printable = 4, -- 3
- Print = 4, -- 3
- NoZoom = 8, -- 4
- NoRotate = 16, -- 5
- NoView = 32, -- 6
- ReadOnly = 64, -- 7
- Locked = 128, -- 8
- ToggleNoView = 256, -- 9
- LockedContents = 512, -- 10,
- AutoView = 256, -- 288 (6+9)
+ Invisible = 2^0, -- 1
+ Hidden = 2^1, -- 2
+ Printable = 2^2, -- 3
+ Print = 2^2, -- 3
+ NoZoom = 2^3, -- 4
+ NoRotate = 2^4, -- 5
+ NoView = 2^5, -- 6
+ ReadOnly = 2^6, -- 7
+ Locked = 2^7, -- 8
+ ToggleNoView = 2^8, -- 9
+ LockedContents = 2^9, -- 10,
+ AutoView = 2^8, -- 6 + 9 ?
}
-- todo: check what is interfaced
@@ -198,33 +198,82 @@ local function fieldplus(specification) -- /F
return n
end
-local function checked(what)
- local set, bug = references.identify("",what)
- if not bug and #set > 0 then
- local r, n = pdfaction(set)
- return pdfshareobjectreference(r)
- end
-end
+-- keep:
+--
+-- local function checked(what)
+-- local set, bug = references.identify("",what)
+-- if not bug and #set > 0 then
+-- local r, n = pdfaction(set)
+-- return pdfshareobjectreference(r)
+-- end
+-- end
+--
+-- local function fieldactions(specification) -- share actions
+-- local d, a = { }, nil
+-- a = specification.mousedown
+-- or specification.clickin if a and a ~= "" then d.D = checked(a) end
+-- a = specification.mouseup
+-- or specification.clickout if a and a ~= "" then d.U = checked(a) end
+-- a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
+-- a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
+-- a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
+-- a = specification.format if a and a ~= "" then d.F = checked(a) end
+-- a = specification.validate if a and a ~= "" then d.V = checked(a) end
+-- a = specification.calculate if a and a ~= "" then d.C = checked(a) end
+-- a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
+-- a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
+-- a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
+-- a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
+-- -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
+-- -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
+-- return next(d) and pdfdictionary(d)
+-- end
+
+local mapping = {
+ mousedown = "D", clickin = "D",
+ mouseup = "U", clickout = "U",
+ regionin = "E",
+ regionout = "X",
+ afterkey = "K",
+ format = "F",
+ validate = "V",
+ calculate = "C",
+ focusin = "Fo",
+ focusout = "Bl",
+ openpage = "PO",
+ closepage = "PC",
+ -- visiblepage = "PV",
+ -- invisiblepage = "PI",
+}
local function fieldactions(specification) -- share actions
- local d, a = { }, nil
- a = specification.mousedown
- or specification.clickin if a and a ~= "" then d.D = checked(a) end
- a = specification.mouseup
- or specification.clickout if a and a ~= "" then d.U = checked(a) end
- a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
- a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
- a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
- a = specification.format if a and a ~= "" then d.F = checked(a) end
- a = specification.validate if a and a ~= "" then d.V = checked(a) end
- a = specification.calculate if a and a ~= "" then d.C = checked(a) end
- a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
- a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
- a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
- a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
- -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
- -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
- return next(d) and pdfdictionary(d)
+ local d = nil
+ for key, target in next, mapping do
+ local code = specification[key]
+ if code and code ~= "" then
+ -- local a = checked(code)
+ local set, bug = references.identify("",code)
+ if not bug and #set > 0 then
+ local a = pdfaction(set) -- r, n
+ if a then
+ local r = pdfshareobjectreference(a)
+ if d then
+ d[target] = r
+ else
+ d = pdfdictionary { [target] = r }
+ end
+ else
+ report_fields("invalid field action %a, case %s",code,2)
+ end
+ else
+ report_fields("invalid field action %a, case %s",code,1)
+ end
+ end
+ end
+ -- if d then
+ -- d = pdfshareobjectreference(d) -- not much overlap or maybe only some patterns
+ -- end
+ return d
end
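A sketch of what the loop produces for a given field specification, with the reference resolution replaced by plain strings (the real code stores shared object references): only keys that occur in the mapping and carry a non-empty action make it into the /AA dictionary.

local mapping = { clickin = "D", clickout = "U", afterkey = "K" } -- subset of the table above

local function fieldactions(specification)
    local d = nil
    for key, target in pairs(mapping) do
        local code = specification[key]
        if code and code ~= "" then
            d = d or { }
            d[target] = code -- the real code stores a shared object reference here
        end
    end
    return d
end

local aa = fieldactions { clickin = "StartSound", afterkey = "CheckInput" } -- made-up actions
print(aa.D, aa.K) -- StartSound  CheckInput   ->  /AA << /D ... /K ... >>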
-- fonts and color
@@ -298,16 +347,16 @@ local function fieldsurrounding(specification)
fontsize = todimen(fontsize)
fontsize = fontsize and (bpfactor * fontsize) or 12
fontraise = 0.1 * fontsize -- todo: figure out what the natural one is and compensate for strutdp
- local fontcode = format("%0.4f Tf %0.4f Ts",fontsize,fontraise)
+ local fontcode = formatters["%0.4f Tf %0.4f Ts"](fontsize,fontraise)
-- we could test for colorvalue being 1 (black) and omit it then
- local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space
+ local colorcode = pdfcolor(3,colorvalue) -- we force an rgb color space
if trace_fields then
report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode)
report_fields("using color, value %a, code %a",colorvalue,colorcode)
end
local stream = pdfstream {
pdfconstant(tag),
- format("%s %s",fontcode,colorcode)
+ formatters["%s %s"](fontcode,colorcode)
}
usedfonts[tag] = a -- the name
-- move up with "x.y Ts"
@@ -570,17 +619,14 @@ local function todingbat(n)
end
end
--- local zero_bc = pdfarray { 0, 0, 0 }
--- local zero_bg = pdfarray { 1, 1, 1 }
-
local function fieldrendering(specification)
local bvalue = tonumber(specification.backgroundcolorvalue)
local fvalue = tonumber(specification.framecolorvalue)
local svalue = specification.fontsymbol
if bvalue or fvalue or (svalue and svalue ~= "") then
return pdfdictionary {
- BG = bvalue and pdfarray { lpdf.colorvalues(3,bvalue) } or nil, -- or zero_bg,
- BC = fvalue and pdfarray { lpdf.colorvalues(3,fvalue) } or nil, -- or zero_bc,
+ BG = bvalue and pdfarray { pdfcolorvalues(3,bvalue) } or nil, -- or zero_bg,
+ BC = fvalue and pdfarray { pdfcolorvalues(3,fvalue) } or nil, -- or zero_bc,
CA = svalue and pdfstring (svalue) or nil,
}
end
@@ -590,7 +636,7 @@ end
local function fieldlayer(specification) -- we can move this in line
local layer = specification.layer
- return (layer and lpdf.layerreference(layer)) or nil
+ return (layer and pdflayerreference(layer)) or nil
end
-- defining
@@ -611,7 +657,7 @@ local xfdftemplate = [[
function codeinjections.exportformdata(name)
local result = { }
for k, v in table.sortedhash(fields) do
-        result[#result+1] = format("    <field name='%s'><value>%s</value></field>",v.name or k,v.default or "")
+        result[#result+1] = formatters["    <field name='%s'><value>%s</value></field>"](v.name or k,v.default or "")
end
local base = file.basename(tex.jobname)
local xfdf = format(xfdftemplate,base,table.concat(result,"\n"))
@@ -912,7 +958,7 @@ local function save_parent(field,specification,d,hasopt)
end
local function save_kid(field,specification,d,optname)
- local kn = pdfreserveannotation()
+ local kn = pdfreserveobject()
field.kids[#field.kids+1] = pdfreference(kn)
if optname then
local opt = field.opt
@@ -921,7 +967,7 @@ local function save_kid(field,specification,d,optname)
end
end
local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d(),kn))
+ local box = hpack_node(nodeinjections.annotation(width,height,depth,d(),kn))
box.width, box.height, box.depth = width, height, depth -- redundant
return box
end
@@ -969,6 +1015,8 @@ local function makelinechild(name,specification)
if trace_fields then
report_fields("using child text %a",name)
end
+ -- we could save a little by not setting some key/value when it's the
+ -- same as parent but it would cost more memory to keep track of it
local d = pdfdictionary {
Subtype = pdf_widget,
Parent = pdfreference(parent.pobj),
diff --git a/Master/texmf-dist/tex/context/base/lpdf-fmt.lua b/Master/texmf-dist/tex/context/base/lpdf-fmt.lua
index 94c005f6570..568b801b493 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-fmt.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-fmt.lua
@@ -36,7 +36,7 @@ local pdfstring = lpdf.string
local pdfverbose = lpdf.verbose
local pdfflushstreamfileobject = lpdf.flushstreamfileobject
-local texset = tex.set -- we could make tex.setglobal
+local texset = tex.set
local addtoinfo = lpdf.addtoinfo
local injectxmpinfo = lpdf.injectxmpinfo
@@ -349,7 +349,7 @@ local filenames = {
}
local function locatefile(filename)
- local fullname = resolvers.findfile(filename,"icc")
+ local fullname = resolvers.findfile(filename,"icc",1,true)
if not fullname or fullname == "" then
fullname = resolvers.finders.byscheme("loc",filename) -- could be specific to the project
end
@@ -743,7 +743,7 @@ end
function codeinjections.supportedformats()
local t = { }
for k, v in table.sortedhash(formats) do
- if find(k,"pdf") then
+ if find(k,"pdf",1,true) then
t[#t+1] = k
end
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-grp.lua b/Master/texmf-dist/tex/context/base/lpdf-grp.lua
index fed5e6a4665..befe52c7656 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-grp.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-grp.lua
@@ -236,7 +236,7 @@ function img.package(image) -- see lpdf-u3d **
local height = boundingbox[4]
local xform = img.scan {
attr = resources(),
- stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag),
+ stream = format("%F 0 0 %F 0 0 cm /%s Do",width,height,imagetag),
bbox = { 0, 0, width/factor, height/factor },
}
img.immediatewrite(xform)
diff --git a/Master/texmf-dist/tex/context/base/lpdf-ini.lua b/Master/texmf-dist/tex/context/base/lpdf-ini.lua
index cd601f21f4b..a89b8b8c55a 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-ini.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-ini.lua
@@ -9,35 +9,178 @@ if not modules then modules = { } end modules ['lpdf-ini'] = {
local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset
local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
local utfchar, utfvalues = utf.char, utf.values
-local sind, cosd = math.sind, math.cosd
+local sind, cosd, floor, max, min = math.sind, math.cosd, math.floor, math.max, math.min
local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
local formatters = string.formatters
-local pdfreserveobject = pdf.reserveobj
-local pdfimmediateobject = pdf.immediateobj
-local pdfdeferredobject = pdf.obj
-local pdfreferenceobject = pdf.refobj
+local report_objects = logs.reporter("backend","objects")
+local report_finalizing = logs.reporter("backend","finalizing")
+local report_blocked = logs.reporter("backend","blocked")
+
+-- gethpos : used
+-- getpos : used
+-- getvpos : used
+--
+-- getmatrix : used
+-- hasmatrix : used
+--
+-- mapfile : used in font-ctx.lua
+-- mapline : used in font-ctx.lua
+--
+-- maxobjnum : not used
+-- obj : used
+-- immediateobj : used
+-- objtype : not used
+-- pageref : used
+-- print : can be used
+-- refobj : used
+-- registerannot : not to be used
+-- reserveobj : used
+
+-- pdf.catalog : used
+-- pdf.info : used
+-- pdf.trailer : used
+-- pdf.names : not to be used
+
+-- pdf.setinfo : used
+-- pdf.setcatalog : used
+-- pdf.setnames : not to be used
+-- pdf.settrailer : used
+
+-- pdf.getinfo : used
+-- pdf.getcatalog : used
+-- pdf.getnames : not to be used
+-- pdf.gettrailer : used
+
+local pdf = pdf
+local factor = number.dimenfactors.bp
+
+if pdf.setinfo then
+-- table.setmetatablenewindex(pdf,function(t,k,v)
+-- report_blocked("'pdf.%s' is not supported",k)
+-- end)
+ -- the getters are harmless
+end
+
+if not pdf.setinfo then
+ function pdf.setinfo (s) pdf.info = s end
+ function pdf.setcatalog(s) pdf.catalog = s end
+ function pdf.setnames (s) pdf.names = s end
+ function pdf.settrailer(s) pdf.trailer = s end
+end
+
+if not pdf.getpos then
+ function pdf.getpos () return pdf.h, pdf.v end
+ function pdf.gethpos () return pdf.h end
+ function pdf.getvpos () return pdf.v end
+ function pdf.hasmatrix() return false end
+ function pdf.getmatrix() return 1, 0, 0, 1, 0, 0 end
+end
+
+if not pdf.setpageresources then
+ function pdf.setpageresources (s) pdf.pageresources = s end
+ function pdf.setpageattributes (s) pdf.pageattributes = s end
+ function pdf.setpagesattributes(s) pdf.pagesattributes = s end
+end
+
+local pdfsetinfo = pdf.setinfo
+local pdfsetcatalog = pdf.setcatalog
+local pdfsetnames = pdf.setnames
+local pdfsettrailer = pdf.settrailer
+
+local pdfsetpageresources = pdf.setpageresources
+local pdfsetpageattributes = pdf.setpageattributes
+local pdfsetpagesattributes = pdf.setpagesattributes
+
+local pdfgetpos = pdf.getpos
+local pdfgethpos = pdf.gethpos
+local pdfgetvpos = pdf.getvpos
+local pdfgetmatrix = pdf.getmatrix
+local pdfhasmatrix = pdf.hasmatrix
+
+local pdfreserveobject = pdf.reserveobj
+local pdfimmediateobject = pdf.immediateobj
+local pdfdeferredobject = pdf.obj
+local pdfreferenceobject = pdf.refobj
+
+function pdf.setinfo () report_blocked("'pdf.%s' is not supported","setinfo") end -- use lpdf.addtoinfo etc
+function pdf.setcatalog () report_blocked("'pdf.%s' is not supported","setcatalog") end
+function pdf.setnames () report_blocked("'pdf.%s' is not supported","setnames") end
+function pdf.settrailer () report_blocked("'pdf.%s' is not supported","settrailer") end
+function pdf.setpageresources () report_blocked("'pdf.%s' is not supported","setpageresources") end
+function pdf.setpageattributes () report_blocked("'pdf.%s' is not supported","setpageattributes") end
+function pdf.setpagesattributes() report_blocked("'pdf.%s' is not supported","setpagesattributes") end
+
+function pdf.registerannot() report_blocked("'pdf.%s' is not supported","registerannot") end
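-- The pattern above (copy the engine primitive into a local, then replace the
-- global with a stub that reports misuse) keeps third-party code from writing
-- to the pdf namespace behind the backend's back. The same idea in isolation,
-- using a hypothetical "demo" interface that is not part of the actual backend:

local report_demo = logs.reporter("backend","demo")

local demo = { setthing = function(v) --[[ talk to the engine here ]] end }

local setthing = demo.setthing      -- the backend keeps using this private copy
function demo.setthing()            -- uncontrolled callers only get a report
    report_demo("use the managed interface instead of 'demo.setthing'")
end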
local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end)
local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end)
local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end)
local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end)
-local report_objects = logs.reporter("backend","objects")
-local report_finalizing = logs.reporter("backend","finalizing")
-
-local backends = backends
-
-backends.pdf = backends.pdf or {
+local backends = backends
+local pdfbackend = {
comment = "backend for directly generating pdf output",
nodeinjections = { },
codeinjections = { },
registrations = { },
tables = { },
}
+backends.pdf = pdfbackend
+lpdf = lpdf or { }
+local lpdf = lpdf
+
+local codeinjections = pdfbackend.codeinjections
+local nodeinjections = pdfbackend.nodeinjections
+
+codeinjections.getpos = pdfgetpos lpdf.getpos = pdfgetpos
+codeinjections.gethpos = pdfgethpos lpdf.gethpos = pdfgethpos
+codeinjections.getvpos = pdfgetvpos lpdf.getvpos = pdfgetvpos
+codeinjections.hasmatrix = pdfhasmatrix lpdf.hasmatrix = pdfhasmatrix
+codeinjections.getmatrix = pdfgetmatrix lpdf.getmatrix = pdfgetmatrix
+
+function lpdf.transform(llx,lly,urx,ury)
+ if pdfhasmatrix() then
+ local sx, rx, ry, sy = pdfgetmatrix()
+ local w, h = urx - llx, ury - lly
+ return llx, lly, llx + sy*w - ry*h, lly + sx*h - rx*w
+ else
+ return llx, lly, urx, ury
+ end
+end
-lpdf = lpdf or { }
-local lpdf = lpdf
+-- function lpdf.rectangle(width,height,depth)
+-- local h, v = pdfgetpos()
+-- local llx, lly, urx, ury
+-- if pdfhasmatrix() then
+-- local sx, rx, ry, sy = pdfgetmatrix()
+-- llx = 0
+-- lly = -depth
+-- -- llx = ry * depth
+-- -- lly = -sx * depth
+-- urx = sy * width - ry * height
+-- ury = sx * height - rx * width
+-- else
+-- llx = 0
+-- lly = -depth
+-- urx = width
+-- ury = height
+-- return (h+llx)*factor, (v+lly)*factor, (h+urx)*factor, (v+ury)*factor
+-- end
+-- end
+
+function lpdf.rectangle(width,height,depth)
+ local h, v = pdfgetpos()
+ if pdfhasmatrix() then
+ local sx, rx, ry, sy = pdfgetmatrix()
+ -- return (h+ry*depth)*factor, (v-sx*depth)*factor, (h+sy*width-ry*height)*factor, (v+sx*height-rx*width)*factor
+ return h *factor, (v- depth)*factor, (h+sy*width-ry*height)*factor, (v+sx*height-rx*width)*factor
+ else
+ return h *factor, (v- depth)*factor, (h+ width )*factor, (v+ height )*factor
+ end
+end
+
+--
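-- lpdf.transform and lpdf.rectangle convert scaled point dimensions at the
-- current output position into bp coordinates, honouring the transform matrix
-- when one is active; they only make sense where pdfgetpos is valid, that is
-- during shipout (typically from a latelua call). A hedged usage sketch with
-- made-up dimensions:

local width, height, depth = 655360, 327680, 65536   -- 10pt x 5pt, 1pt depth (sp)
local llx, lly, urx, ury = lpdf.rectangle(width,height,depth)
-- llx/lly: lower left corner (current point, baseline minus depth)
-- urx/ury: upper right corner; all four values are in big points (bp)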
local function tosixteen(str) -- an lpeg might be faster (no table)
if not str or str == "" then
@@ -49,7 +192,8 @@ local function tosixteen(str) -- an lpeg might be faster (no table)
if b < 0x10000 then
r[n] = format("%04x",b)
else
- r[n] = format("%04x%04x",b/1024+0xD800,b%1024+0xDC00)
+ -- r[n] = format("%04x%04x",b/1024+0xD800,b%1024+0xDC00)
+ r[n] = format("%04x%04x",floor(b/1024),b%1024+0xDC00)
end
end
n = n + 1
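-- For reference: a standard UTF-16 surrogate pair is computed from the code
-- point minus 0x10000. A small self-contained check (plain Lua, independent of
-- the routine above):

local function utf16pair(b)
    local c = b - 0x10000
    return 0xD800 + math.floor(c/1024), 0xDC00 + c % 1024
end

-- string.format("%04X %04X",utf16pair(0x1D11E)) -- "D834 DD1E" (musical G clef)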
@@ -90,13 +234,13 @@ end
lpdf.toeight = toeight
---~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0)
-
---~ local function cleaned(str)
---~ return (str and str ~= "" and lpegmatch(escaped,str)) or ""
---~ end
-
---~ lpdf.cleaned = cleaned -- not public yet
+-- local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0)
+--
+-- local function cleaned(str)
+-- return (str and str ~= "" and lpegmatch(escaped,str)) or ""
+-- end
+--
+-- lpdf.cleaned = cleaned -- not public yet
local function merge_t(a,b)
local t = { }
@@ -111,16 +255,16 @@ local f_dictionary = formatters["<< % t >>"]
local f_key_array = formatters["/%s [ % t ]"]
local f_array = formatters["[ % t ]"]
+-- local f_key_value = formatters["/%s %s"]
+-- local f_key_dictionary = formatters["/%s <<% t>>"]
+-- local f_dictionary = formatters["<<% t>>"]
+-- local f_key_array = formatters["/%s [% t]"]
+-- local f_array = formatters["[% t]"]
+
local tostring_a, tostring_d
tostring_d = function(t,contentonly,key)
- if not next(t) then
- if contentonly then
- return ""
- else
- return "<< >>"
- end
- else
+ if next(t) then
local r, rn = { }, 0
for k, v in next, t do
rn = rn + 1
@@ -149,18 +293,16 @@ tostring_d = function(t,contentonly,key)
else
return f_dictionary(r)
end
+ elseif contentonly then
+ return ""
+ else
+ return "<< >>"
end
end
tostring_a = function(t,contentonly,key)
local tn = #t
- if tn == 0 then
- if contentonly then
- return ""
- else
- return "[ ]"
- end
- else
+ if tn ~= 0 then
local r = { }
for k=1,tn do
local v = t[k]
@@ -190,10 +332,14 @@ tostring_a = function(t,contentonly,key)
else
return f_array(r)
end
+ elseif contentonly then
+ return ""
+ else
+ return "[ ]"
end
end
-local tostring_x = function(t) return concat(t, " ") end
+local tostring_x = function(t) return concat(t," ") end
local tostring_s = function(t) return toeight(t[1]) end
local tostring_u = function(t) return tosixteen(t[1]) end
local tostring_n = function(t) return tostring(t[1]) end -- tostring not needed
@@ -206,7 +352,7 @@ local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. " 0 R
local tostring_v = function(t)
local s = t[1]
if type(s) == "table" then
- return concat(s,"")
+ return concat(s)
else
return s
end
@@ -324,12 +470,27 @@ local function pdfboolean(b,default)
end
end
-local function pdfreference(r)
- return setmetatable({ r or 0 },mt_r)
+local r_zero = setmetatable({ 0 },mt_r)
+
+local function pdfreference(r) -- maybe make a weak table
+ if r and r ~= 0 then
+ return setmetatable({ r },mt_r)
+ else
+ return r_zero
+ end
end
+local v_zero = setmetatable({ 0 },mt_v)
+local v_empty = setmetatable({ "" },mt_v)
+
local function pdfverbose(t) -- maybe check for type
- return setmetatable({ t or "" },mt_v)
+ if t == 0 then
+ return v_zero
+ elseif t == "" then
+ return v_empty
+ else
+ return setmetatable({ t },mt_v)
+ end
end
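-- Reusing one shared wrapper for the common cases (reference 0, verbose 0 or "")
-- avoids allocating a new table per call. The same memoization idea, sketched
-- with a hypothetical helper; a weak table (__mode = "v") would additionally let
-- unused wrappers be collected, as the "maybe make a weak table" note suggests:

local shared_v = setmetatable({ }, { __mode = "v" })

local function sharedverbose(t)
    local s = shared_v[t]
    if not s then
        s = setmetatable({ t }, mt_v)   -- assumes the mt_v metatable used above
        shared_v[t] = s
    end
    return s
end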
lpdf.stream = pdfstream -- THIS WILL PROBABLY CHANGE
@@ -344,37 +505,19 @@ lpdf.boolean = pdfboolean
lpdf.reference = pdfreference
lpdf.verbose = pdfverbose
--- n = pdf.obj(n, str)
--- n = pdf.obj(n, "file", filename)
--- n = pdf.obj(n, "stream", streamtext, attrtext)
--- n = pdf.obj(n, "streamfile", filename, attrtext)
-
--- we only use immediate objects
-
--- todo: tracing
-
local names, cache = { }, { }
function lpdf.reserveobject(name)
- if name == "annot" then
- -- catch misuse
- return pdfreserveobject("annot")
- else
- local r = pdfreserveobject()
- if name then
- names[name] = r
- if trace_objects then
- report_objects("reserving number %a under name %a",r,name)
- end
- elseif trace_objects then
- report_objects("reserving number %a",r)
+ local r = pdfreserveobject() -- we don't support "annot"
+ if name then
+ names[name] = r
+ if trace_objects then
+ report_objects("reserving number %a under name %a",r,name)
end
- return r
+ elseif trace_objects then
+ report_objects("reserving number %a",r)
end
-end
-
-function lpdf.reserveannotation()
- return pdfreserveobject("annot")
+ return r
end
-- lpdf.immediateobject = pdfimmediateobject
@@ -382,11 +525,29 @@ end
-- lpdf.object = pdfdeferredobject
-- lpdf.referenceobject = pdfreferenceobject
-lpdf.pagereference = pdf.pageref or tex.pdfpageref
-lpdf.registerannotation = pdf.registerannot
+local pagereference = pdf.pageref or tex.pdfpageref
+local nofpages = 0
-function lpdf.delayedobject(data) -- we will get rid of this one
- local n = pdfdeferredobject(data)
+function lpdf.pagereference(n)
+ if nofpages == 0 then
+ nofpages = structures.pages.nofpages
+ if nofpages == 0 then
+ nofpages = 1
+ end
+ end
+ if n > nofpages then
+ return pagereference(nofpages) -- or 1, could be configurable
+ else
+ return pagereference(n)
+ end
+end
+
+function lpdf.delayedobject(data,n)
+ if n then
+ pdfdeferredobject(n,data)
+ else
+ n = pdfdeferredobject(data)
+ end
pdfreferenceobject(n)
return n
end
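-- lpdf.pagereference now clamps out-of-range page numbers to the last known
-- page, and lpdf.delayedobject can fill in a previously reserved object number.
-- A hedged usage sketch (the dictionary body is made up):

local n = lpdf.reserveobject()                    -- number is known up front
-- ... other code can already emit references to object n ...
lpdf.delayedobject("<< /Type /Example >>", n)     -- body supplied afterwards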
@@ -483,60 +644,10 @@ function lpdf.shareobjectreference(content)
end
end
---~ local d = lpdf.dictionary()
---~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } }
---~ local f = lpdf.dictionary { ["f"] = "ABC" }
---~ local a = lpdf.array { lpdf.array { lpdf.string("xxx") } }
-
---~ print(a)
---~ os.exit()
-
---~ d["test"] = lpdf.string ("test")
---~ d["more"] = "more"
---~ d["bool"] = true
---~ d["numb"] = 1234
---~ d["oeps"] = lpdf.dictionary { ["hans"] = "ton" }
---~ d["whow"] = lpdf.array { lpdf.string("ton") }
-
---~ a[#a+1] = lpdf.string("xxx")
---~ a[#a+1] = lpdf.string("yyy")
-
---~ d.what = a
-
---~ print(e)
-
---~ local d = lpdf.dictionary()
---~ d["abcd"] = { 1, 2, 3, "test" }
---~ print(d)
---~ print(d())
-
---~ local d = lpdf.array()
---~ d[#d+1] = 1
---~ d[#d+1] = 2
---~ d[#d+1] = 3
---~ d[#d+1] = "test"
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { 1, 2, 3, "test" }
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { a=1, b=2, c=3, d="test" }
---~ print(d)
-
---~ local s = lpdf.constant("xx")
---~ print(s) -- fails somehow
---~ print(s()) -- fails somehow
-
---~ local s = lpdf.boolean(false)
---~ s.value = true
---~ print(s)
---~ print(s())
-
-- three priority levels, default=2
-local pagefinalizers, documentfinalizers = { { }, { }, { } }, { { }, { }, { } }
+local pagefinalizers = { { }, { }, { } }
+local documentfinalizers = { { }, { }, { } }
local pageresources, pageattributes, pagesattributes
@@ -549,9 +660,9 @@ end
resetpageproperties()
local function setpageproperties()
- pdf.pageresources = pageresources ()
- pdf.pageattributes = pageattributes ()
- pdf.pagesattributes = pagesattributes()
+ pdfsetpageresources (pageresources ())
+ pdfsetpageattributes (pageattributes ())
+ pdfsetpagesattributes(pagesattributes())
end
local function addtopageresources (k,v) pageresources [k] = v end
@@ -605,8 +716,8 @@ end
lpdf.registerpagefinalizer = registerpagefinalizer
lpdf.registerdocumentfinalizer = registerdocumentfinalizer
-function lpdf.finalizepage()
- if not environment.initex then
+function lpdf.finalizepage(shipout)
+ if shipout and not environment.initex then
-- resetpageproperties() -- maybe better before
run(pagefinalizers,"page")
setpageproperties()
@@ -624,9 +735,27 @@ function lpdf.finalizedocument()
end
end
-backends.pdf.codeinjections.finalizepage = lpdf.finalizepage -- will go when we have hook
+-- codeinjections.finalizepage = lpdf.finalizepage -- no longer triggered at the tex end
+
+if not callbacks.register("finish_pdfpage", lpdf.finalizepage) then
+
+ local find_tail = nodes.tail
+ local latelua_node = nodes.pool.latelua
+
+ function nodeinjections.finalizepage(head)
+ local t = find_tail(head.list)
+ if t then
+ local n = latelua_node("lpdf.finalizepage(true)") -- last in the shipout
+ t.next = n
+ n.prev = t
+ end
+ return head, true
+ end
+
+ nodes.tasks.appendaction("shipouts","normalizers","backends.pdf.nodeinjections.finalizepage")
+
+end
---~ callbacks.register("finish_pdfpage", lpdf.finalizepage)
callbacks.register("finish_pdffile", lpdf.finalizedocument)
-- some minimal tracing, handy for checking the order
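-- Page finalizers now run from the finish_pdfpage callback when available, or
-- from a latelua node appended at shipout otherwise; either way user code keeps
-- registering hooks the same way. A minimal, hypothetical registration:

local report_demo = logs.reporter("backend","demo")

lpdf.registerpagefinalizer(function()
    report_demo("about to flush this page's resources")
end, 2, "demo page hook")   -- priority 2 of the three levels (the default)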
@@ -646,15 +775,34 @@ lpdf.protectresources = true
local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type
local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type
-local names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
+----- names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
-local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdf.catalog = catalog() end end
-local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdf.info = info () end end
-local function flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdf.names = names () end end
+local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdfsetcatalog(catalog()) end end
+local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdfsetinfo (info ()) end end
+-------------- flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdfsetnames (names ()) end end
function lpdf.addtocatalog(k,v) if not (lpdf.protectresources and catalog[k]) then trace_set("catalog",k) catalog[k] = v end end
function lpdf.addtoinfo (k,v) if not (lpdf.protectresources and info [k]) then trace_set("info", k) info [k] = v end end
-function lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end
+-------- lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end
+
+local names = pdfdictionary {
+ -- Type = pdfconstant("Names")
+}
+
+local function flushnames()
+ if next(names) and not environment.initex then
+ names.Type = pdfconstant("Names")
+ trace_flush("names")
+ lpdf.addtocatalog("Names",pdfreference(pdfimmediateobject(tostring(names))))
+ end
+end
+
+function lpdf.addtonames(k,v)
+ if not (lpdf.protectresources and names [k]) then
+ trace_set("names", k)
+ names [k] = v
+ end
+end
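-- Name tree entries are now collected locally and only flushed into the catalog
-- when at least one entry was added. A hedged usage sketch (the object content
-- is made up; the pattern mirrors flushjavascripts in lpdf-mis.lua):

local ref = lpdf.reference(lpdf.flushobject(lpdf.dictionary { Names = lpdf.array() }))
lpdf.addtonames("JavaScript", ref)
-- flushnames() later adds /Names << /JavaScript ... >> to the catalog, but only
-- because an entry actually exists.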
local dummy = pdfreserveobject() -- else bug in hvmd due to some internal luatex conflict
@@ -704,9 +852,9 @@ registerdocumentfinalizer(flushcolorspaces,3,"color spaces")
registerdocumentfinalizer(flushpatterns,3,"patterns")
registerdocumentfinalizer(flushshades,3,"shades")
+registerdocumentfinalizer(flushnames,3,"names") -- before catalog
registerdocumentfinalizer(flushcatalog,3,"catalog")
registerdocumentfinalizer(flushinfo,3,"info")
-registerdocumentfinalizer(flushnames,3,"names") -- before catalog
registerpagefinalizer(checkextgstates,3,"extended graphic states")
registerpagefinalizer(checkcolorspaces,3,"color spaces")
@@ -717,7 +865,7 @@ registerpagefinalizer(checkshades,3,"shades")
function lpdf.rotationcm(a)
local s, c = sind(a), cosd(a)
- return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c)
+ return format("%0.6F %0.6F %0.6F %0.6F 0 0 cm",c,s,-s,c)
end
-- ! -> universaltime
@@ -794,29 +942,56 @@ end
-- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname)
-- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com")
-if not pdfreferenceobject then
-
- local delayed = { }
-
- local function flush()
- local n = 0
- for k,v in next, delayed do
- pdfimmediateobject(k,v)
- n = n + 1
- end
- if trace_objects then
- report_objects("%s objects flushed",n)
- end
- delayed = { }
- end
-
- lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
- lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
-
- function lpdf.delayedobject(data)
- local n = pdfreserveobject()
- delayed[n] = data
- return n
- end
+-- if not pdfreferenceobject then
+--
+-- local delayed = { }
+--
+-- local function flush()
+-- local n = 0
+-- for k,v in next, delayed do
+-- pdfimmediateobject(k,v)
+-- n = n + 1
+-- end
+-- if trace_objects then
+-- report_objects("%s objects flushed",n)
+-- end
+-- delayed = { }
+-- end
+--
+-- lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
+-- lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
+--
+-- function lpdf.delayedobject(data)
+-- local n = pdfreserveobject()
+-- delayed[n] = data
+-- return n
+-- end
+--
+-- end
-end
+-- setmetatable(pdf, {
+-- __index = function(t,k)
+-- if k == "info" then return pdf.getinfo()
+-- elseif k == "catalog" then return pdf.getcatalog()
+-- elseif k == "names" then return pdf.getnames()
+-- elseif k == "trailer" then return pdf.gettrailer()
+-- elseif k == "pageattribute" then return pdf.getpageattribute()
+-- elseif k == "pageattributes" then return pdf.getpageattributes()
+-- elseif k == "pageresources" then return pdf.getpageresources()
+-- else
+-- return nil
+-- end
+-- end,
+-- __newindex = function(t,k,v)
+-- if k == "info" then return pdf.setinfo(v)
+-- elseif k == "catalog" then return pdf.setcatalog(v)
+-- elseif k == "names" then return pdf.setnames(v)
+-- elseif k == "trailer" then return pdf.settrailer(v)
+-- elseif k == "pageattribute" then return pdf.setpageattribute(v)
+-- elseif k == "pageattributes" then return pdf.setpageattributes(v)
+-- elseif k == "pageresources" then return pdf.setpageresources(v)
+-- else
+-- rawset(t,k,v)
+-- end
+-- end,
+-- })
diff --git a/Master/texmf-dist/tex/context/base/lpdf-mis.lua b/Master/texmf-dist/tex/context/base/lpdf-mis.lua
index 174d17427fa..6efbd38820e 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-mis.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-mis.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['lpdf-mis'] = {
-- course there are a couple of more changes.
local next, tostring = next, tostring
-local format, gsub = string.format, string.gsub
+local format, gsub, formatters = string.format, string.gsub, string.formatters
local texset = tex.set
local backends, lpdf, nodes = backends, lpdf, nodes
@@ -41,8 +41,17 @@ local pdfverbose = lpdf.verbose
local pdfstring = lpdf.string
local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
+local pdfaction = lpdf.action
+
+local formattedtimestamp = lpdf.pdftimestamp
+local adddocumentextgstate = lpdf.adddocumentextgstate
+local addtocatalog = lpdf.addtocatalog
+local addtoinfo = lpdf.addtoinfo
+local addtopageattributes = lpdf.addtopageattributes
+local addtonames = lpdf.addtonames
local variables = interfaces.variables
+local v_stop = variables.stop
local positive = register(pdfliteral("/GSpositive gs"))
local negative = register(pdfliteral("/GSnegative gs"))
@@ -59,8 +68,8 @@ local function initializenegative()
}
local negative = pdfdictionary { Type = g, TR = pdfreference(pdfflushstreamobject("{ 1 exch sub }",d)) }
local positive = pdfdictionary { Type = g, TR = pdfconstant("Identity") }
- lpdf.adddocumentextgstate("GSnegative", pdfreference(pdfflushobject(negative)))
- lpdf.adddocumentextgstate("GSpositive", pdfreference(pdfflushobject(positive)))
+ adddocumentextgstate("GSnegative", pdfreference(pdfflushobject(negative)))
+ adddocumentextgstate("GSpositive", pdfreference(pdfflushobject(positive)))
initializenegative = nil
end
@@ -68,8 +77,8 @@ local function initializeoverprint()
local g = pdfconstant("ExtGState")
local knockout = pdfdictionary { Type = g, OP = false, OPM = 0 }
local overprint = pdfdictionary { Type = g, OP = true, OPM = 1 }
- lpdf.adddocumentextgstate("GSknockout", pdfreference(pdfflushobject(knockout)))
- lpdf.adddocumentextgstate("GSoverprint", pdfreference(pdfflushobject(overprint)))
+ adddocumentextgstate("GSknockout", pdfreference(pdfflushobject(knockout)))
+ adddocumentextgstate("GSoverprint", pdfreference(pdfflushobject(overprint)))
initializeoverprint = nil
end
@@ -91,8 +100,6 @@ function nodeinjections.negative()
return copy_node(negative)
end
---
-
-- function codeinjections.addtransparencygroup()
-- -- png: /CS /DeviceRGB /I true
-- local d = pdfdictionary {
@@ -100,7 +107,7 @@ end
-- I = true,
-- K = true,
-- }
--- lpdf.registerpagefinalizer(function() lpdf.addtopageattributes("Group",d) end) -- hm
+-- lpdf.registerpagefinalizer(function() addtopageattributes("Group",d) end) -- hm
-- end
-- actions (todo: store and update when changed)
@@ -125,10 +132,10 @@ end
local function flushdocumentactions()
if opendocument then
- lpdf.addtocatalog("OpenAction",lpdf.action(opendocument))
+ addtocatalog("OpenAction",pdfaction(opendocument))
end
if closedocument then
- lpdf.addtocatalog("CloseAction",lpdf.action(closedocument))
+ addtocatalog("CloseAction",pdfaction(closedocument))
end
end
@@ -136,12 +143,12 @@ local function flushpageactions()
if openpage or closepage then
local d = pdfdictionary()
if openpage then
- d.O = lpdf.action(openpage)
+ d.O = pdfaction(openpage)
end
if closepage then
- d.C = lpdf.action(closepage)
+ d.C = pdfaction(closepage)
end
- lpdf.addtopageattributes("AA",d)
+ addtopageattributes("AA",d)
end
end
@@ -168,37 +175,37 @@ local function setupidentity()
if not title or title == "" then
title = tex.jobname
end
- lpdf.addtoinfo("Title", pdfunicode(title), title)
+ addtoinfo("Title", pdfunicode(title), title)
local subtitle = identity.subtitle or ""
if subtitle ~= "" then
- lpdf.addtoinfo("Subject", pdfunicode(subtitle), subtitle)
+ addtoinfo("Subject", pdfunicode(subtitle), subtitle)
end
local author = identity.author or ""
if author ~= "" then
- lpdf.addtoinfo("Author", pdfunicode(author), author) -- '/Author' in /Info, 'Creator' in XMP
+ addtoinfo("Author", pdfunicode(author), author) -- '/Author' in /Info, 'Creator' in XMP
end
local creator = identity.creator or ""
if creator ~= "" then
- lpdf.addtoinfo("Creator", pdfunicode(creator), creator) -- '/Creator' in /Info, 'CreatorTool' in XMP
+ addtoinfo("Creator", pdfunicode(creator), creator) -- '/Creator' in /Info, 'CreatorTool' in XMP
end
- lpdf.addtoinfo("CreationDate", pdfstring(lpdf.pdftimestamp(lpdf.timestamp())))
+ local currenttimestamp = lpdf.timestamp()
+ addtoinfo("CreationDate", pdfstring(formattedtimestamp(currenttimestamp)))
local date = identity.date or ""
- local pdfdate = lpdf.pdftimestamp(date)
+ local pdfdate = formattedtimestamp(date)
if pdfdate then
- lpdf.addtoinfo("ModDate", pdfstring(pdfdate), date)
+ addtoinfo("ModDate", pdfstring(pdfdate), date)
else
-- users should enter the date in 2010-01-19T23:27:50+01:00 format
-- and if not provided that way we use the creation time instead
- date = lpdf.timestamp()
- lpdf.addtoinfo("ModDate", pdfstring(lpdf.pdftimestamp(date)), date)
+ addtoinfo("ModDate", pdfstring(formattedtimestamp(currenttimestamp)), currenttimestamp)
end
local keywords = identity.keywords or ""
if keywords ~= "" then
keywords = gsub(keywords, "[%s,]+", " ")
- lpdf.addtoinfo("Keywords",pdfunicode(keywords), keywords)
+ addtoinfo("Keywords",pdfunicode(keywords), keywords)
end
local id = lpdf.id()
- lpdf.addtoinfo("ID", pdfstring(id), id) -- needed for pdf/x
+ addtoinfo("ID", pdfstring(id), id) -- needed for pdf/x
done = true
else
-- no need for a message
@@ -225,7 +232,7 @@ local function flushjavascripts()
a[#a+1] = pdfstring(name)
a[#a+1] = pdfreference(pdfflushobject(j))
end
- lpdf.addtonames("JavaScript",pdfreference(pdfflushobject(pdfdictionary{ Names = a })))
+ addtonames("JavaScript",pdfreference(pdfflushobject(pdfdictionary{ Names = a })))
end
end
@@ -284,16 +291,16 @@ local function documentspecification()
layout = layout and pdfconstant(layout)
fit = fit and pdfdictionary { FitWindow = true }
if layout then
- lpdf.addtocatalog("PageLayout",layout)
+ addtocatalog("PageLayout",layout)
end
if mode then
- lpdf.addtocatalog("PageMode",mode)
+ addtocatalog("PageMode",mode)
end
if fit then
- lpdf.addtocatalog("ViewerPreferences",fit)
+ addtocatalog("ViewerPreferences",fit)
end
- lpdf.addtoinfo ("Trapped", pdfconstant("False")) -- '/Trapped' in /Info, 'Trapped' in XMP
- lpdf.addtocatalog("Version", pdfconstant(format("1.%s",tex.pdfminorversion)))
+ addtoinfo ("Trapped", pdfconstant("False")) -- '/Trapped' in /Info, 'Trapped' in XMP
+ addtocatalog("Version", pdfconstant(format("1.%s",tex.pdfminorversion)))
end
end
@@ -302,7 +309,7 @@ end
local factor = number.dimenfactors.bp
local function boxvalue(n) -- we could share them
- return pdfverbose(format("%0.4f",factor * n))
+ return pdfverbose(formatters["%0.4F"](factor * n))
end
local function pagespecification()
@@ -313,10 +320,10 @@ local function pagespecification()
boxvalue(width-leftoffset),
boxvalue(pageheight-topoffset),
}
- lpdf.addtopageattributes("CropBox",box) -- mandate for rendering
- lpdf.addtopageattributes("TrimBox",box) -- mandate for pdf/x
- -- lpdf.addtopageattributes("BleedBox",box)
- -- lpdf.addtopageattributes("ArtBox",box)
+ addtopageattributes("CropBox",box) -- mandate for rendering
+ addtopageattributes("TrimBox",box) -- mandate for pdf/x
+ -- addtopageattributes("BleedBox",box)
+ -- addtopageattributes("ArtBox",box)
end
lpdf.registerpagefinalizer(pagespecification,"page specification")
@@ -337,34 +344,85 @@ local map = {
characters = "a",
}
+-- local function featurecreep()
+-- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
+-- local getstructureset = structures.sets.get
+-- for i=1,#pages do
+-- local p = pages[i]
+-- if not p then
+-- return -- fatal error
+-- else
+-- local numberdata = p.numberdata
+-- if numberdata then
+-- local conversionset = numberdata.conversionset
+-- if conversionset then
+-- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
+-- if conversion ~= lastconversion then
+-- lastconversion = conversion
+-- list[#list+1] = i - 1 -- pdf starts numbering at 0
+-- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
+-- end
+-- end
+-- end
+-- if not lastconversion then
+-- lastconversion = "numbers"
+-- list[#list+1] = i - 1 -- pdf starts numbering at 0
+-- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
+-- end
+-- end
+-- end
+-- addtocatalog("PageLabels", pdfdictionary { Nums = list })
+-- end
+
local function featurecreep()
- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
- local getstructureset = structures.sets.get
+ local pages = structures.pages.tobesaved
+ local list = pdfarray()
+ local getset = structures.sets.get
+ local stopped = false
+ local oldlabel = nil
+ local oldconversion = nil
for i=1,#pages do
local p = pages[i]
if not p then
return -- fatal error
+ end
+ local label = p.viewerprefix or ""
+ if p.status == v_stop then
+ if not stopped then
+ list[#list+1] = i - 1 -- pdf starts numbering at 0
+ list[#list+1] = pdfdictionary {
+ P = pdfunicode(label),
+ }
+ stopped = true
+ end
+ oldlabel = nil
+ oldconversion = nil
else
local numberdata = p.numberdata
+ local conversion = nil
+ local number = p.number
if numberdata then
local conversionset = numberdata.conversionset
if conversionset then
- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
- if conversion ~= lastconversion then
- lastconversion = conversion
- list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
- end
+ conversion = getset("structure:conversions",p.block,conversionset,1,"numbers")
end
end
- if not lastconversion then
- lastconversion = "numbers"
+ conversion = conversion and map[conversion] or map.numbers
+ if number == 1 or oldlabel ~= label or oldconversion ~= conversion then
list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
+ list[#list+1] = pdfdictionary {
+ S = pdfconstant(conversion),
+ St = number,
+ P = label ~= "" and pdfunicode(label) or nil,
+ }
end
+ oldlabel = label
+ oldconversion = conversion
+ stopped = false
end
end
- lpdf.addtocatalog("PageLabels", pdfdictionary { Nums = list })
+ addtocatalog("PageLabels", pdfdictionary { Nums = list })
end
lpdf.registerdocumentfinalizer(featurecreep,"featurecreep")
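-- The PageLabels tree built above maps zero-based page indices to a numbering
-- style (/S), a start value (/St) and an optional viewer prefix (/P). For a
-- document with four roman front matter pages followed by arabic numbering the
-- catalog entry would look roughly like this (illustration only; the real
-- entries depend on the per-page structure data):
--
--   /PageLabels << /Nums [ 0 << /S /r >>  4 << /S /D /St 1 >> ] >>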
diff --git a/Master/texmf-dist/tex/context/base/lpdf-mov.lua b/Master/texmf-dist/tex/context/base/lpdf-mov.lua
index 41db97e0c52..87375e4ce74 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-mov.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-mov.lua
@@ -11,10 +11,10 @@ local format = string.format
local lpdf = lpdf
local nodeinjections = backends.pdf.nodeinjections
-local pdfannotation_node = nodes.pool.pdfannotation
local pdfconstant = lpdf.constant
local pdfdictionary = lpdf.dictionary
local pdfarray = lpdf.array
+local pdfborder = lpdf.border
local write_node = node.write
function nodeinjections.insertmovie(specification)
@@ -31,14 +31,16 @@ function nodeinjections.insertmovie(specification)
ShowControls = (specification.controls and true) or false,
Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil,
}
+ local bs, bc = pdfborder()
local action = pdfdictionary {
Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
+ Border = bs,
+ C = bc,
T = format("movie %s",specification.label),
Movie = moviedict,
A = controldict,
}
- write_node(pdfannotation_node(width,height,0,action())) -- test: context(...)
+ write_node(nodeinjections.annotation(width,height,0,action())) -- test: context(...)
end
function nodeinjections.insertsound(specification)
@@ -51,13 +53,15 @@ function nodeinjections.insertsound(specification)
local sounddict = pdfdictionary {
F = soundclip.filename
}
+ local bs, bc = pdfborder()
local action = pdfdictionary {
Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
+ Border = bs,
+ C = bc,
T = format("sound %s",specification.label),
Movie = sounddict,
A = controldict,
}
- write_node(pdfannotation_node(0,0,0,action())) -- test: context(...)
+ write_node(nodeinjections.annotation(0,0,0,action())) -- test: context(...)
end
end
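-- Annotation borders now come from lpdf.border, which (as used above) returns a
-- border specification plus an optional colour array instead of the former
-- hard-coded Border = [ 0 0 0 ]. A hedged sketch of a compatible fallback, in
-- case one is ever needed elsewhere (hypothetical, not the actual definition):

local function fallbackborder()
    return lpdf.array { 0, 0, 0 }, nil   -- invisible border, no colour
end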
diff --git a/Master/texmf-dist/tex/context/base/lpdf-nod.lua b/Master/texmf-dist/tex/context/base/lpdf-nod.lua
index 60d3fcd5b2a..6295947d008 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-nod.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-nod.lua
@@ -6,21 +6,29 @@ if not modules then modules = { } end modules ['lpdf-nod'] = {
license = "see context related readme files"
}
-local format = string.format
+local type = type
-local copy_node = node.copy
-local new_node = node.new
+local formatters = string.formatters
-local nodepool = nodes.pool
-local register = nodepool.register
local whatsitcodes = nodes.whatsitcodes
local nodeinjections = backends.nodeinjections
-local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local setfield = nuts.setfield
+
+local copy_node = nuts.copy
+local new_node = nuts.new
+
+local nodepool = nuts.pool
+local register = nodepool.register
+
+local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfliteral,"mode",1)
local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix))
-local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched
+local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) setfield(pdfdest,"named_id",1) -- xyz_zoom untouched
local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot))
local variables = interfaces.variables
@@ -38,14 +46,14 @@ local views = { -- beware, we do support the pdf keys but this is *not* official
function nodepool.pdfliteral(str)
local t = copy_node(pdfliteral)
- t.data = str
+ setfield(t,"data",str)
return t
end
function nodepool.pdfdirect(str)
local t = copy_node(pdfliteral)
- t.data = str
- t.mode = 1
+ setfield(t,"data",str)
+ setfield(t,"mode",1)
return t
end
@@ -57,9 +65,37 @@ function nodepool.pdfrestore()
return copy_node(pdfrestore)
end
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) -- todo: tx ty
local t = copy_node(pdfsetmatrix)
- t.data = format("%s %s %s %s",rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
+ if type(rx) == "string" then
+ setfield(t,"data",rx)
+ else
+ if not rx then
+ rx = 1
+ elseif rx == 0 then
+ rx = 0.0001
+ end
+ if not ry then
+ ry = 1
+ elseif ry == 0 then
+ ry = 0.0001
+ end
+ if not sx then
+ sx = 0
+ end
+ if not sy then
+ sy = 0
+ end
+ if sx == 0 and sy == 0 then
+ if rx == 1 and ry == 1 then
+ setfield(t,"data","1 0 0 1")
+ else
+ setfield(t,"data",formatters["%0.6F 0 0 %0.6F"](rx,ry))
+ end
+ else
+ setfield(t,"data",formatters["%0.6F %0.6F %0.6F %0.6F"](rx,sx,sy,ry))
+ end
+ end
return t
end
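-- The rewritten pdfsetmatrix normalizes its arguments before formatting the
-- whatsit data. Expected data strings, following the code above:
--
--   nodepool.pdfsetmatrix()            -- "1 0 0 1"  (identity)
--   nodepool.pdfsetmatrix(2,0,0,2)     -- "2.000000 0 0 2.000000"
--   nodepool.pdfsetmatrix(1,0.5,0,1)   -- "1.000000 0.500000 0.000000 1.000000"
--   nodepool.pdfsetmatrix("2 0 0 2")   -- a string is passed through unchanged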
@@ -67,24 +103,28 @@ nodeinjections.save = nodepool.pdfsave
nodeinjections.restore = nodepool.pdfrestore
nodeinjections.transform = nodepool.pdfsetmatrix
+-- the next one is implemented differently, using latelua
+
function nodepool.pdfannotation(w,h,d,data,n)
- local t = copy_node(pdfannot)
- if w and w ~= 0 then
- t.width = w
- end
- if h and h ~= 0 then
- t.height = h
- end
- if d and d ~= 0 then
- t.depth = d
- end
- if n then
- t.objnum = n
- end
- if data and data ~= "" then
- t.data = data
- end
- return t
+ report("don't use node based annotations!")
+ os.exit()
+-- local t = copy_node(pdfannot)
+-- if w and w ~= 0 then
+-- setfield(t,"width",w)
+-- end
+-- if h and h ~= 0 then
+-- setfield(t,"height",h)
+-- end
+-- if d and d ~= 0 then
+-- setfield(t,"depth",d)
+-- end
+-- if n then
+-- setfield(t,"objnum",n)
+-- end
+-- if data and data ~= "" then
+-- setfield(t,"data",data)
+-- end
+-- return t
end
-- (!) The next code in pdfdest.w is wrong:
@@ -101,36 +141,43 @@ end
-- so we need to force a matrix.
function nodepool.pdfdestination(w,h,d,name,view,n)
- local t = copy_node(pdfdest)
- local hasdimensions = false
- if w and w ~= 0 then
- t.width = w
- hasdimensions = true
- end
- if h and h ~= 0 then
- t.height = h
- hasdimensions = true
- end
- if d and d ~= 0 then
- t.depth = d
- hasdimensions = true
- end
- if n then
- t.objnum = n
- end
- view = views[view] or view or 1 -- fit is default
- t.dest_id = name
- t.dest_type = view
- if hasdimensions and view == 0 then -- xyz
- -- see (!) s -> m -> t -> r
- local s = copy_node(pdfsave)
- local m = copy_node(pdfsetmatrix)
- local r = copy_node(pdfrestore)
- m.data = "1 0 0 1"
- s.next = m m.next = t t.next = r
- m.prev = s t.prev = m r.prev = t
- return s -- a list
- else
- return t
- end
+ report("don't use node based destinations!")
+ os.exit()
+-- local t = copy_node(pdfdest)
+-- local hasdimensions = false
+-- if w and w ~= 0 then
+-- setfield(t,"width",w)
+-- hasdimensions = true
+-- end
+-- if h and h ~= 0 then
+-- setfield(t,"height",h)
+-- hasdimensions = true
+-- end
+-- if d and d ~= 0 then
+-- setfield(t,"depth",d)
+-- hasdimensions = true
+-- end
+-- if n then
+-- setfield(t,"objnum",n)
+-- end
+-- view = views[view] or view or 1 -- fit is default
+-- setfield(t,"dest_id",name)
+-- setfield(t,"dest_type",view)
+-- if hasdimensions and view == 0 then -- xyz
+-- -- see (!) s -> m -> t -> r
+-- -- linked
+-- local s = copy_node(pdfsave)
+-- local m = copy_node(pdfsetmatrix)
+-- local r = copy_node(pdfrestore)
+-- setfield(m,"data","1 0 0 1")
+-- setfield(s,"next",m)
+-- setfield(m,"next",t)
+-- setfield(t,"next",r)
+-- setfield(m,"prev",s)
+-- setfield(t,"prev",m)
+-- setfield(r,"prev",t)
+-- return s -- a list
+-- else
+-- return t
+-- end
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-ren.lua b/Master/texmf-dist/tex/context/base/lpdf-ren.lua
index 6af65f9de71..37b70642076 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-ren.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-ren.lua
@@ -15,47 +15,51 @@ local settings_to_array = utilities.parsers.settings_to_array
local backends, lpdf, nodes, node = backends, lpdf, nodes, node
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-local viewerlayers = attributes.viewerlayers
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+local viewerlayers = attributes.viewerlayers
-local references = structures.references
+local references = structures.references
-references.executers = references.executers or { }
-local executers = references.executers
+references.executers = references.executers or { }
+local executers = references.executers
-local variables = interfaces.variables
+local variables = interfaces.variables
-local v_no = variables.no
-local v_yes = variables.yes
-local v_start = variables.start
-local v_stop = variables.stop
-local v_reset = variables.reset
-local v_auto = variables.auto
-local v_random = variables.random
+local v_no = variables.no
+local v_yes = variables.yes
+local v_start = variables.start
+local v_stop = variables.stop
+local v_reset = variables.reset
+local v_auto = variables.auto
+local v_random = variables.random
-local pdfconstant = lpdf.constant
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
+local pdfconstant = lpdf.constant
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
-local nodepool = nodes.pool
-local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
+local addtopageattributes = lpdf.addtopageattributes
+local addtopageresources = lpdf.addtopageresources
+local addtocatalog = lpdf.addtocatalog
-local pdf_ocg = pdfconstant("OCG")
-local pdf_ocmd = pdfconstant("OCMD")
-local pdf_off = pdfconstant("OFF")
-local pdf_on = pdfconstant("ON")
-local pdf_toggle = pdfconstant("Toggle")
-local pdf_setocgstate = pdfconstant("SetOCGState")
+local nodepool = nodes.pool
+local register = nodepool.register
+local pdfliteral = nodepool.pdfliteral
-local copy_node = node.copy
+local pdf_ocg = pdfconstant("OCG")
+local pdf_ocmd = pdfconstant("OCMD")
+local pdf_off = pdfconstant("OFF")
+local pdf_on = pdfconstant("ON")
+local pdf_toggle = pdfconstant("Toggle")
+local pdf_setocgstate = pdfconstant("SetOCGState")
-local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } }
+local copy_node = node.copy
+
+local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } }
-- We can have references to layers before they are places, for instance from
-- hide and vide actions. This is why we need to be able to force usage of layers
@@ -163,7 +167,7 @@ local function flushtextlayers()
BaseState = pdf_on,
},
}
- lpdf.addtocatalog("OCProperties",d)
+ addtocatalog("OCProperties",d)
textlayers = nil
end
end
@@ -171,7 +175,7 @@ end
local function flushpagelayers() -- we can share these
if pagelayers then
- lpdf.addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
+ addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
end
end
@@ -342,8 +346,8 @@ function codeinjections.setpagetransition(specification)
end
delay = tonumber(delay)
if delay and delay > 0 then
- lpdf.addtopageattributes("Dur",delay)
+ addtopageattributes("Dur",delay)
end
- lpdf.addtopageattributes("Trans",d)
+ addtopageattributes("Trans",d)
end
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-swf.lua b/Master/texmf-dist/tex/context/base/lpdf-swf.lua
index 12c80036fd2..88cdcc4ec51 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-swf.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-swf.lua
@@ -28,8 +28,6 @@ local checkedkey = lpdf.checkedkey
local codeinjections = backends.pdf.codeinjections
local nodeinjections = backends.pdf.nodeinjections
-local pdfannotation_node = nodes.pool.pdfannotation
-
local trace_swf = false trackers.register("backend.swf", function(v) trace_swf = v end)
local report_swf = logs.reporter("backend","swf")
@@ -302,5 +300,5 @@ function backends.pdf.nodeinjections.insertswf(spec)
-- factor = spec.factor,
-- label = spec.label,
}
- context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
+ context(nodeinjections.annotation(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
end
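-- As elsewhere in this patch, the node pool annotation whatsit is replaced by
-- the backend's nodeinjections.annotation (which, per the note in lpdf-nod.lua,
-- is implemented differently, using latelua). The calling convention stays
-- (width, height, depth, data, optional object number); a hedged sketch with
-- made-up content:

local d = lpdf.dictionary { Subtype = lpdf.constant("Link") }   -- made-up dictionary
context(nodeinjections.annotation(65536,65536,0,d()))           -- a 1pt square hit area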
diff --git a/Master/texmf-dist/tex/context/base/lpdf-tag.lua b/Master/texmf-dist/tex/context/base/lpdf-tag.lua
index 8cdb5f6a415..276816e8041 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-tag.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-tag.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['lpdf-tag'] = {
license = "see context related readme files"
}
+local next = next
local format, match, concat = string.format, string.match, table.concat
local lpegmatch = lpeg.match
local utfchar = utf.char
@@ -14,65 +15,82 @@ local trace_tags = false trackers.register("structures.tags", function(v) trace
local report_tags = logs.reporter("backend","tags")
-local backends, lpdf, nodes = backends, lpdf, nodes
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-
-local tasks = nodes.tasks
-
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfboolean = lpdf.boolean
-local pdfconstant = lpdf.constant
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfstring = lpdf.string
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-local pdfpagereference = lpdf.pagereference
-
-local nodepool = nodes.pool
-
-local pdfliteral = nodepool.pdfliteral
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glyph_code = nodecodes.glyph
-
-local a_tagged = attributes.private('tagged')
-local a_image = attributes.private('image')
-
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local tosequence = nodes.tosequence
-local copy_node = node.copy
-local slide_nodelist = node.slide
-
-local structure_stack = { }
-local structure_kids = pdfarray()
-local structure_ref = pdfreserveobject()
-local parent_ref = pdfreserveobject()
-local root = { pref = pdfreference(structure_ref), kids = structure_kids }
-local tree = { }
-local elements = { }
-local names = pdfarray()
-local taglist = structures.tags.taglist
-local usedlabels = structures.tags.labels
-local properties = structures.tags.properties
-local usedmapping = { }
-
-local colonsplitter = lpeg.splitat(":")
-local dashsplitter = lpeg.splitat("-")
-
-local add_ids = false -- true
-
-
---~ function codeinjections.maptag(original,target,kind)
---~ mapping[original] = { target, kind or "inline" }
---~ end
+local backends = backends
+local lpdf = lpdf
+local nodes = nodes
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+
+local tasks = nodes.tasks
+
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfboolean = lpdf.boolean
+local pdfconstant = lpdf.constant
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfstring = lpdf.string
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
+local pdfpagereference = lpdf.pagereference
+
+local addtocatalog = lpdf.addtocatalog
+local addtopageattributes = lpdf.addtopageattributes
+
+local texgetcount = tex.getcount
+
+local nodecodes = nodes.nodecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+
+local a_tagged = attributes.private('tagged')
+local a_image = attributes.private('image')
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local nodepool = nuts.pool
+local pdfliteral = nodepool.pdfliteral
+
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getlist = nuts.getlist
+local setfield = nuts.setfield
+
+local traverse_nodes = nuts.traverse
+local tosequence = nuts.tosequence
+local copy_node = nuts.copy
+local slide_nodelist = nuts.slide
+local insert_before = nuts.insert_before
+local insert_after = nuts.insert_after
+
+local structure_stack = { }
+local structure_kids = pdfarray()
+local structure_ref = pdfreserveobject()
+local parent_ref = pdfreserveobject()
+local root = { pref = pdfreference(structure_ref), kids = structure_kids }
+local tree = { }
+local elements = { }
+local names = pdfarray()
+local taglist = structures.tags.taglist
+local usedlabels = structures.tags.labels
+local properties = structures.tags.properties
+local usedmapping = { }
+
+local colonsplitter = lpeg.splitat(":")
+local dashsplitter = lpeg.splitat("-")
+
+local add_ids = false -- true
+
+-- function codeinjections.maptag(original,target,kind)
+-- mapping[original] = { target, kind or "inline" }
+-- end
local function finishstructure()
if #structure_kids > 0 then
@@ -109,14 +127,14 @@ local function finishstructure()
RoleMap = rolemap,
}
pdfflushobject(structure_ref,structuretree)
- lpdf.addtocatalog("StructTreeRoot",pdfreference(structure_ref))
+ addtocatalog("StructTreeRoot",pdfreference(structure_ref))
--
local markinfo = pdfdictionary {
Marked = pdfboolean(true),
-- UserProperties = pdfboolean(true),
-- Suspects = pdfboolean(true),
}
- lpdf.addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
+ addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
--
for fulltag, element in next, elements do
pdfflushobject(element.knum,element.kids)
@@ -133,7 +151,7 @@ local pdf_struct_element = pdfconstant("StructElem")
local function initializepage()
index = 0
- pagenum = tex.count.realpageno
+ pagenum = texgetcount("realpageno")
pageref = pdfreference(pdfpagereference(pagenum))
list = pdfarray()
tree[pagenum] = list -- we can flush after done, todo
@@ -141,7 +159,7 @@ end
local function finishpage()
-- flush what can be flushed
- lpdf.addtopageattributes("StructParents",pagenum-1)
+ addtopageattributes("StructParents",pagenum-1)
end
-- here we can flush and free elements that are finished
@@ -174,7 +192,8 @@ local function makeelement(fulltag,parent)
end
local function makecontent(parent,start,stop,slist,id)
- local tag, kids = parent.tag, parent.kids
+ local tag = parent.tag
+ local kids = parent.kids
local last = index
if id == "image" then
local d = pdfdictionary {
@@ -197,24 +216,29 @@ local function makecontent(parent,start,stop,slist,id)
end
--
local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last))
- local prev = start.prev
+ local eliteral = pdfliteral("EMC")
+ -- use insert instead:
+ local prev = getprev(start)
if prev then
- prev.next, bliteral.prev = bliteral, prev
+ setfield(prev,"next",bliteral)
+ setfield(bliteral,"prev",prev)
end
- start.prev, bliteral.next = bliteral, start
- if slist and slist.list == start then
- slist.list = bliteral
- elseif not prev then
- report_tags("this can't happen: injection in front of nothing")
- end
- --
- local eliteral = pdfliteral("EMC")
- local next = stop.next
+ setfield(start,"prev",bliteral)
+ setfield(bliteral,"next",start)
+ -- use insert instead:
+ local next = getnext(stop)
if next then
- next.prev, eliteral.next = eliteral, next
+ setfield(next,"prev",eliteral)
+ setfield(eliteral,"next",next)
end
- stop.next, eliteral.prev = eliteral, stop
+ setfield(stop,"next",eliteral)
+ setfield(eliteral,"prev",stop)
--
+ if slist and getlist(slist) == start then
+ setfield(slist,"list",bliteral)
+ elseif not getprev(start) then
+ report_tags("this can't happen: injection in front of nothing")
+ end
index = index + 1
list[index] = parent.pref
return bliteral, eliteral
@@ -226,9 +250,9 @@ local level, last, ranges, range = 0, nil, { }, nil
local function collectranges(head,list)
for n in traverse_nodes(head) do
- local id = n.id -- 14: image, 8: literal (mp)
+ local id = getid(n) -- 14: image, 8: literal (mp)
if id == glyph_code then
- local at = n[a_tagged]
+ local at = getattr(n,a_tagged)
if not at then
range = nil
elseif last ~= at then
@@ -239,9 +263,9 @@ local function collectranges(head,list)
range[4] = n -- stop
end
elseif id == hlist_code or id == vlist_code then
- local at = n[a_image]
+ local at = getattr(n,a_image)
if at then
- local at = n[a_tagged]
+ local at = getattr(n,a_tagged)
if not at then
range = nil
else
@@ -249,7 +273,7 @@ local function collectranges(head,list)
end
last = nil
else
- local nl = n.list
+ local nl = getlist(n)
slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
collectranges(nl,n)
end
@@ -261,6 +285,7 @@ function nodeinjections.addtags(head)
-- no need to adapt head, as we always operate on lists
level, last, ranges, range = 0, nil, { }, nil
initializepage()
+ head = tonut(head)
collectranges(head)
if trace_tags then
for i=1,#ranges do
@@ -294,8 +319,9 @@ function nodeinjections.addtags(head)
finishpage()
-- can be separate feature
--
- -- injectspans(head) -- does not work yet
+ -- injectspans(tonut(head)) -- does not work yet
--
+ head = tonode(head)
return head, true
end
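-- The tagging pass now works on "nuts" (direct nodes): the incoming head is
-- converted once with tonut, processed with the nuts accessors, and converted
-- back with tonode before being returned with the "done" flag. The same wrapper
-- pattern, sketched for a hypothetical list processor:

local nuts   = nodes.nuts
local tonut  = nuts.tonut
local tonode = nuts.tonode

local function processor(head)
    local h = tonut(head)
    -- inspect or modify h here with getid, getnext, setfield and friends
    return tonode(h), true
end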
diff --git a/Master/texmf-dist/tex/context/base/lpdf-u3d.lua b/Master/texmf-dist/tex/context/base/lpdf-u3d.lua
index 33269486c18..f0fca0762f8 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-u3d.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-u3d.lua
@@ -17,6 +17,7 @@ if not modules then modules = { } end modules ['lpdf-u3d'] = {
-- point we will end up with a reimplementation. For instance
-- it makes sense to add the same activation code as with swf.
+local tonumber = tonumber
local format, find = string.format, string.find
local cos, sin, sqrt, pi, atan2, abs = math.cos, math.sin, math.sqrt, math.pi, math.atan2, math.abs
@@ -38,8 +39,6 @@ local pdfflushstreamfileobject = lpdf.flushstreamfileobject
local checkedkey = lpdf.checkedkey
local limited = lpdf.limited
-local pdfannotation_node = nodes.pool.pdfannotation
-
local schemes = table.tohash {
"Artwork", "None", "White", "Day", "Night", "Hard",
"Primary", "Blue", "Red", "Cube", "CAD", "Headlamp",
@@ -462,7 +461,7 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
},
ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") },
}
- local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw)
+ local pwd = pdfflushstreamobject(format("q /GS gs %F 0 0 %F 0 0 cm /IM Do Q",factor*width,factor*height),pw)
annot.AP = pdfdictionary {
N = pdfreference(pwd)
}
@@ -484,5 +483,5 @@ function nodeinjections.insertu3d(spec)
controls = spec.controls,
label = spec.label,
}
- node.write(pdfannotation_node(spec.width,spec.height,0,annotation()))
+ node.write(nodeinjections.annotation(spec.width,spec.height,0,annotation()))
end
diff --git a/Master/texmf-dist/tex/context/base/lpdf-wid.lua b/Master/texmf-dist/tex/context/base/lpdf-wid.lua
index 9ea4744f164..895bbd3ffb2 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-wid.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-wid.lua
@@ -8,14 +8,18 @@ if not modules then modules = { } end modules ['lpdf-wid'] = {
local gmatch, gsub, find, lower, format = string.gmatch, string.gsub, string.find, string.lower, string.format
local stripstring = string.strip
-local texbox, texcount = tex.box, tex.count
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
-local report_media = logs.reporter("backend","media")
-local report_attachment = logs.reporter("backend","attachment")
+local report_media = logs.reporter("backend","media")
+local report_attachment = logs.reporter("backend","attachment")
-local backends, lpdf, nodes = backends, lpdf, nodes
+local backends = backends
+local lpdf = lpdf
+local nodes = nodes
+local context = context
+
+local texgetcount = tex.getcount
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
@@ -42,20 +46,18 @@ local pdfcolorspec = lpdf.colorspec
local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
local pdfflushstreamfileobject = lpdf.flushstreamfileobject
-local pdfreserveannotation = lpdf.reserveannotation
local pdfreserveobject = lpdf.reserveobject
local pdfpagereference = lpdf.pagereference
local pdfshareobjectreference = lpdf.shareobjectreference
+local pdfaction = lpdf.action
+local pdfborder = lpdf.border
-local nodepool = nodes.pool
-
-local pdfannotation_node = nodepool.pdfannotation
+local pdftransparencyvalue = lpdf.transparencyvalue
+local pdfcolorvalues = lpdf.colorvalues
local hpack_node = node.hpack
local write_node = node.write -- test context(...) instead
-local pdf_border = pdfarray { 0, 0, 0 } -- can be shared
-
-- symbols
local presets = { } -- xforms
@@ -113,8 +115,8 @@ codeinjections.presetsymbollist = presetsymbollist
-- }
local attachment_symbols = {
- Graph = pdfconstant("GraphPushPin"),
- Paperclip = pdfconstant("PaperclipTag"),
+ Graph = pdfconstant("Graph"),
+ Paperclip = pdfconstant("Paperclip"),
Pushpin = pdfconstant("PushPin"),
}
@@ -166,12 +168,12 @@ end
local function analyzecolor(colorvalue,colormodel)
local cvalue = colorvalue and tonumber(colorvalue)
local cmodel = colormodel and tonumber(colormodel) or 3
- return cvalue and pdfarray { lpdf.colorvalues(cmodel,cvalue) } or nil
+ return cvalue and pdfarray { pdfcolorvalues(cmodel,cvalue) } or nil
end
local function analyzetransparency(transparencyvalue)
local tvalue = transparencyvalue and tonumber(transparencyvalue)
- return tvalue and lpdf.transparencyvalue(tvalue) or nil
+ return tvalue and pdftransparencyvalue(tvalue) or nil
end
-- Attachments
@@ -301,9 +303,9 @@ function nodeinjections.attachfile(specification)
if registered == "" then
registered = filename
end
- if author == "" then
+ if author == "" and title ~= "" then
author = title
- title = ""
+ title = filename or ""
end
if author == "" then
author = filename or ""
@@ -338,7 +340,7 @@ function nodeinjections.attachfile(specification)
OC = analyzelayer(specification.layer),
}
local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ local box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
box.width, box.height, box.depth = width, height, depth
return box
end
@@ -423,19 +425,19 @@ function nodeinjections.comment(specification) -- brrr: seems to be done twice
local box
if usepopupcomments then
-- rather useless as we can hide/vide
- local nd = pdfreserveannotation()
- local nc = pdfreserveannotation()
+ local nd = pdfreserveobject()
+ local nc = pdfreserveobject()
local c = pdfdictionary {
Subtype = pdfconstant("Popup"),
Parent = pdfreference(nd),
}
d.Popup = pdfreference(nc)
box = hpack_node(
- pdfannotation_node(0,0,0,d(),nd),
- pdfannotation_node(width,height,depth,c(),nc)
+ nodeinjections.annotation(0,0,0,d(),nd),
+ nodeinjections.annotation(width,height,depth,c(),nc)
)
else
- box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
end
box.width, box.height, box.depth = width, height, depth -- redundant
return box
@@ -480,15 +482,15 @@ end
local ms, mu, mf = { }, { }, { }
local function delayed(label)
- local a = pdfreserveannotation()
+ local a = pdfreserveobject()
mu[label] = a
return pdfreference(a)
end
local function insertrenderingwindow(specification)
local label = specification.label
---~ local openpage = specification.openpage
---~ local closepage = specification.closepage
+ -- local openpage = specification.openpage
+ -- local closepage = specification.closepage
if specification.option == v_auto then
if openpageaction then
-- \handlereferenceactions{\v!StartRendering{#2}}
@@ -500,23 +502,25 @@ local function insertrenderingwindow(specification)
local actions = nil
if openpage or closepage then
actions = pdfdictionary {
- PO = (openpage and lpdf.action(openpage )) or nil,
- PC = (closepage and lpdf.action(closepage)) or nil,
+ PO = (openpage and lpdfaction(openpage )) or nil,
+ PC = (closepage and lpdfaction(closepage)) or nil,
}
end
- local page = tonumber(specification.page) or texcount.realpageno -- todo
- local r = mu[label] or pdfreserveannotation() -- why the reserve here?
+ local page = tonumber(specification.page) or texgetcount("realpageno") -- todo
+ local r = mu[label] or pdfreserveobject() -- why the reserve here?
local a = pdfdictionary {
S = pdfconstant("Rendition"),
R = mf[label],
OP = 0,
AN = pdfreference(r),
}
+ local bs, bc = pdfborder()
local d = pdfdictionary {
Subtype = pdfconstant("Screen"),
P = pdfreference(pdfpagereference(page)),
A = a, -- needed in order to make the annotation clickable (i.e. don't bark)
- Border = pdf_border,
+ Border = bs,
+ C = bc,
AA = actions,
}
local width = specification.width or 0
@@ -524,7 +528,7 @@ local function insertrenderingwindow(specification)
if height == 0 or width == 0 then
-- todo: sound needs no window
end
- write_node(pdfannotation_node(width,height,0,d(),r)) -- save ref
+ write_node(nodeinjections.annotation(width,height,0,d(),r)) -- save ref
return pdfreference(r)
end
@@ -535,35 +539,35 @@ local function insertrendering(specification)
local option = settings_to_hash(specification.option)
if not mf[label] then
local filename = specification.filename
- local isurl = find(filename,"://")
- --~ local start = pdfdictionary {
- --~ Type = pdfconstant("MediaOffset"),
- --~ S = pdfconstant("T"), -- time
- --~ T = pdfdictionary { -- time
- --~ Type = pdfconstant("Timespan"),
- --~ S = pdfconstant("S"),
- --~ V = 3, -- time in seconds
- --~ },
- --~ }
- --~ local start = pdfdictionary {
- --~ Type = pdfconstant("MediaOffset"),
- --~ S = pdfconstant("F"), -- frame
- --~ F = 100 -- framenumber
- --~ }
- --~ local start = pdfdictionary {
- --~ Type = pdfconstant("MediaOffset"),
- --~ S = pdfconstant("M"), -- mark
- --~ M = "somemark",
- --~ }
- --~ local parameters = pdfdictionary {
- --~ BE = pdfdictionary {
- --~ B = start,
- --~ }
- --~ }
- --~ local parameters = pdfdictionary {
- --~ Type = pdfconstant(MediaPermissions),
- --~ TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS
- --~ }
+ local isurl = find(filename,"://",1,true)
+ -- local start = pdfdictionary {
+ -- Type = pdfconstant("MediaOffset"),
+ -- S = pdfconstant("T"), -- time
+ -- T = pdfdictionary { -- time
+ -- Type = pdfconstant("Timespan"),
+ -- S = pdfconstant("S"),
+ -- V = 3, -- time in seconds
+ -- },
+ -- }
+ -- local start = pdfdictionary {
+ -- Type = pdfconstant("MediaOffset"),
+ -- S = pdfconstant("F"), -- frame
+ -- F = 100 -- framenumber
+ -- }
+ -- local start = pdfdictionary {
+ -- Type = pdfconstant("MediaOffset"),
+ -- S = pdfconstant("M"), -- mark
+ -- M = "somemark",
+ -- }
+ -- local parameters = pdfdictionary {
+ -- BE = pdfdictionary {
+ -- B = start,
+ -- }
+ -- }
+ -- local parameters = pdfdictionary {
+ -- Type = pdfconstant(MediaPermissions),
+ -- TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS
+ -- }
local descriptor = pdfdictionary {
Type = pdfconstant("Filespec"),
F = filename,
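
The hunks above follow a convention that recurs throughout this patch: entries such as lpdf.action, lpdf.border and lpdf.reserveobject are resolved once into prefixed locals, and the shared pdf_border array gives way to whatever pdfborder() returns (a /Border value plus an optional color, used as Border and C). A minimal standalone sketch of that shape, with a hypothetical module table standing in for lpdf:

    -- plain Lua sketch; "mylib" and its return values are made up for illustration
    local mylib = {
        action = function(spec) return { S = "Action", spec = spec } end,
        border = function() return { 0, 0, 0 }, nil end,   -- border spec, optional color
    }

    local mylibaction = mylib.action   -- resolved once, then used as fast upvalues
    local mylibborder = mylib.border

    local function annotationdict(openpage)
        local bs, bc = mylibborder()
        return {
            Border = bs,
            C      = bc,
            AA     = openpage and { PO = mylibaction(openpage) } or nil,
        }
    end

    print(annotationdict("StartRendering{example}").Border[3]) -- 0
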
diff --git a/Master/texmf-dist/tex/context/base/lpdf-xmp.lua b/Master/texmf-dist/tex/context/base/lpdf-xmp.lua
index 061ed075718..c8b2d236c62 100644
--- a/Master/texmf-dist/tex/context/base/lpdf-xmp.lua
+++ b/Master/texmf-dist/tex/context/base/lpdf-xmp.lua
@@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['lpdf-xmp'] = {
comment = "with help from Peter Rolf",
}
+local tostring = tostring
local format, random, char, gsub, concat = string.format, math.random, string.char, string.gsub, table.concat
local xmlfillin = xml.fillin
@@ -119,16 +120,16 @@ end
-- redefined
-local addtoinfo = lpdf.addtoinfo
-local addxmpinfo = lpdf.addxmpinfo
+local pdfaddtoinfo = lpdf.addtoinfo
+local pdfaddxmpinfo = lpdf.addxmpinfo
function lpdf.addtoinfo(tag,pdfvalue,strvalue)
- addtoinfo(tag,pdfvalue)
+ pdfaddtoinfo(tag,pdfvalue)
local value = strvalue or gsub(tostring(pdfvalue),"^%((.*)%)$","%1") -- hack
if trace_info then
report_info("set %a to %a",tag,value)
end
- addxmpinfo(tag,value)
+ pdfaddxmpinfo(tag,value)
end
-- for the do-it-yourselvers
@@ -159,20 +160,20 @@ local function flushxmpinfo()
local fullbanner = tex.pdftexbanner
-- local fullbanner = gsub(tex.pdftexbanner,"kpse.*","")
- addxmpinfo("DocumentID", documentid)
- addxmpinfo("InstanceID", instanceid)
- addxmpinfo("Producer", producer)
- addxmpinfo("CreatorTool", creator)
- addxmpinfo("CreateDate", time)
- addxmpinfo("ModifyDate", time)
- addxmpinfo("MetadataDate", time)
- addxmpinfo("PTEX.Fullbanner", fullbanner)
-
- addtoinfo("Producer", producer)
- addtoinfo("Creator", creator)
- addtoinfo("CreationDate", time)
- addtoinfo("ModDate", time)
--- addtoinfo("PTEX.Fullbanner", fullbanner) -- no checking done on existence
+ pdfaddxmpinfo("DocumentID", documentid)
+ pdfaddxmpinfo("InstanceID", instanceid)
+ pdfaddxmpinfo("Producer", producer)
+ pdfaddxmpinfo("CreatorTool", creator)
+ pdfaddxmpinfo("CreateDate", time)
+ pdfaddxmpinfo("ModifyDate", time)
+ pdfaddxmpinfo("MetadataDate", time)
+ pdfaddxmpinfo("PTEX.Fullbanner", fullbanner)
+
+ pdfaddtoinfo("Producer", producer)
+ pdfaddtoinfo("Creator", creator)
+ pdfaddtoinfo("CreationDate", time)
+ pdfaddtoinfo("ModDate", time)
+-- pdfaddtoinfo("PTEX.Fullbanner", fullbanner) -- no checking done on existence
local blob = xml.tostring(xml.first(xmp or valid_xmp(),"/x:xmpmeta"))
local md = pdfdictionary {
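
In the lpdf-xmp.lua hunks the cached originals are renamed to pdfaddtoinfo and pdfaddxmpinfo so they cannot be confused with the redefined lpdf.addtoinfo that wraps them. A small self-contained sketch of that save-and-redefine pattern, using a made-up module and print in place of the reporters:

    local lib = {                                   -- hypothetical module
        addtoinfo = function(tag,value) print("info:",tag,value) end,
    }

    local libaddtoinfo = lib.addtoinfo              -- original kept as an upvalue

    function lib.addtoinfo(tag,pdfvalue,strvalue)   -- redefined wrapper
        libaddtoinfo(tag,pdfvalue)                  -- original behaviour first
        local value = strvalue or tostring(pdfvalue)
        print("xmp:",tag,value)                     -- the extra bookkeeping
    end

    lib.addtoinfo("Producer","LuaTeX")
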
diff --git a/Master/texmf-dist/tex/context/base/luat-cbk.lua b/Master/texmf-dist/tex/context/base/luat-cbk.lua
index 5aa12005b78..8c224ad2cbc 100644
--- a/Master/texmf-dist/tex/context/base/luat-cbk.lua
+++ b/Master/texmf-dist/tex/context/base/luat-cbk.lua
@@ -118,7 +118,7 @@ end
function callbacks.freeze(name,freeze)
freeze = type(freeze) == "string" and freeze
- if find(name,"%*") then
+ if find(name,"*",1,true) then
local pattern = name
for name, _ in next, list do
if find(name,pattern) then
@@ -306,7 +306,7 @@ function garbagecollector.check(size,criterium)
end
end
--- this will move
+-- this will move to a module
commands = commands or { }
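
luat-cbk.lua switches find(name,"%*") to find(name,"*",1,true); the same plain-find idiom shows up in several later hunks ("://", "/"). The fourth argument to string.find disables pattern matching, so magic characters need no escaping and no pattern is compiled. A quick illustration in plain Lua:

    local find = string.find

    print(find("pre_dump*", "%*"))             -- 9  9  (pattern form, * must be escaped)
    print(find("pre_dump*", "*", 1, true))     -- 9  9  (plain form, no escaping)
    print(find("http://foo", "://", 1, true))  -- 5  7
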
diff --git a/Master/texmf-dist/tex/context/base/luat-cnf.lua b/Master/texmf-dist/tex/context/base/luat-cnf.lua
index 3672c603ece..fba2b71d1ec 100644
--- a/Master/texmf-dist/tex/context/base/luat-cnf.lua
+++ b/Master/texmf-dist/tex/context/base/luat-cnf.lua
@@ -23,7 +23,7 @@ texconfig.half_error_line = 50 -- 50 -- obsolete
texconfig.expand_depth = 10000 -- 10000
texconfig.hash_extra = 100000 -- 0
texconfig.nest_size = 1000 -- 50
-texconfig.max_in_open = 500 -- 15
+texconfig.max_in_open = 500 -- 15 -- in fact it's limited to 127
texconfig.max_print_line = 10000 -- 79
texconfig.max_strings = 500000 -- 15000
texconfig.param_size = 25000 -- 60
@@ -134,13 +134,14 @@ function texconfig.init()
-- shortcut and helper
+ local bytecode = lua.bytecode
+
local function init(start)
- local b = lua.bytecode
local i = start
local t = os.clock()
- while b[i] do
- b[i]() ;
- b[i] = nil ;
+ while bytecode[i] do
+ bytecode[i]() ;
+ bytecode[i] = nil ;
i = i + 1
-- collectgarbage('step')
end
@@ -159,6 +160,8 @@ function texconfig.init()
end
end
+ texconfig.init = function() end
+
end
-- we provide a qualified path
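
The luat-cnf.lua change hoists lua.bytecode into a local, clears each slot right after running it, and finally neuters texconfig.init so it can only run once. A standalone sketch of the draining loop, with an ordinary table standing in for lua.bytecode (which only exists inside LuaTeX); the slot numbers are arbitrary:

    local bytecode = {
        [501] = function() print("chunk 501") end,
        [502] = function() print("chunk 502") end,
    }

    local function init(start)
        local i = start
        while bytecode[i] do
            bytecode[i]()        -- run the registered chunk
            bytecode[i] = nil    -- release it so it can be collected
            i = i + 1
        end
        return i - start
    end

    print(init(501), "chunks executed")   -- runs both chunks, then reports 2
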
diff --git a/Master/texmf-dist/tex/context/base/luat-cod.lua b/Master/texmf-dist/tex/context/base/luat-cod.lua
index 8b015477f4e..c16a3b110ea 100644
--- a/Master/texmf-dist/tex/context/base/luat-cod.lua
+++ b/Master/texmf-dist/tex/context/base/luat-cod.lua
@@ -51,6 +51,9 @@ function lua.registercode(filename,version)
bytecode[n] = code
lua.lastbytecode = n
end
+ elseif environment.initex then
+ texio.write_nl("\nerror loading file: " .. filename .. " (aborting)")
+ os.exit()
end
end
end
@@ -85,7 +88,7 @@ local environment = environment
-- no string.unquoted yet
local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1")
-local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or ""
+local sourcepath = find(sourcefile,"/",1,true) and gsub(sourcefile,"/[^/]+$","") or ""
local targetpath = "."
-- delayed (via metatable):
diff --git a/Master/texmf-dist/tex/context/base/luat-env.lua b/Master/texmf-dist/tex/context/base/luat-env.lua
index 8753972c6fe..5f2a0d281a0 100644
--- a/Master/texmf-dist/tex/context/base/luat-env.lua
+++ b/Master/texmf-dist/tex/context/base/luat-env.lua
@@ -20,6 +20,8 @@ local report_lua = logs.reporter("resolvers","lua")
local luautilities = utilities.lua
local luasuffixes = luautilities.suffixes
+local texgettoks = tex and tex.gettoks
+
environment = environment or { }
local environment = environment
@@ -28,7 +30,7 @@ local environment = environment
local mt = {
__index = function(_,k)
if k == "version" then
- local version = tex.toks and tex.toks.contextversiontoks
+ local version = texgettoks and texgettoks("contextversiontoks")
if version and version ~= "" then
rawset(environment,"version",version)
return version
@@ -36,7 +38,7 @@ local mt = {
return "unknown"
end
elseif k == "kind" then
- local kind = tex.toks and tex.toks.contextkindtoks
+ local kind = texgettoks and texgettoks("contextkindtoks")
if kind and kind ~= "" then
rawset(environment,"kind",kind)
return kind
@@ -100,14 +102,20 @@ function environment.luafilechunk(filename,silent) -- used for loading lua bytec
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded
- if trace_locating then
+-- if trace_locating then
+-- report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+-- elseif not silent then
+-- texio.write("<",data and "+ " or "- ",fullname,">")
+-- end
+ if not silent then
report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
end
return data
else
- if trace_locating then
+-- if trace_locating then
+-- report_lua("unknown file %a",filename)
+-- end
+ if not silent then
report_lua("unknown file %a",filename)
end
return nil
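
luat-env.lua now goes through a guarded texgettoks local instead of indexing tex.toks directly; the surrounding metatable lazily computes environment.version and caches it with rawset. A reduced sketch of that lazy __index pattern (the version string below is a placeholder, not a real value):

    local environment = { }

    setmetatable(environment, {
        __index = function(_, k)
            if k == "version" then
                local version = "2013.xx.yy"   -- stands in for texgettoks("contextversiontoks")
                rawset(environment, "version", version)
                return version
            end
        end,
    })

    print(environment.version)              -- computed via __index, then cached
    print(rawget(environment, "version"))   -- now an ordinary field
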
diff --git a/Master/texmf-dist/tex/context/base/luat-fio.lua b/Master/texmf-dist/tex/context/base/luat-fio.lua
index d61c6f14275..dcc183167d8 100644
--- a/Master/texmf-dist/tex/context/base/luat-fio.lua
+++ b/Master/texmf-dist/tex/context/base/luat-fio.lua
@@ -84,26 +84,6 @@ if not resolvers.instance then
end
-local report_system = logs.reporter("system","files")
-local report_files = logs.reporter("used files")
-
-luatex.registerstopactions(function()
- local foundintrees = resolvers.instance.foundintrees
- if #foundintrees > 0 then
- logs.pushtarget("logfile")
- logs.newline()
- report_system("start used files")
- logs.newline()
- for i=1,#foundintrees do
- report_files("%4i: % T",i,foundintrees[i])
- end
- logs.newline()
- report_system("stop used files")
- logs.newline()
- logs.poptarget()
- end
-end)
-
statistics.register("resource resolver", function()
local scandata = resolvers.scandata()
return format("loadtime %s seconds, %s scans with scantime %s seconds, %s shared scans, %s found files, scanned paths: %s",
diff --git a/Master/texmf-dist/tex/context/base/luat-ini.lua b/Master/texmf-dist/tex/context/base/luat-ini.lua
index 587214b93a1..9303b614a3e 100644
--- a/Master/texmf-dist/tex/context/base/luat-ini.lua
+++ b/Master/texmf-dist/tex/context/base/luat-ini.lua
@@ -72,6 +72,8 @@ lua.messages = lua.messages or { } local messages = lua.messages
storage.register("lua/numbers", numbers, "lua.numbers" )
storage.register("lua/messages", messages, "lua.messages")
+local f_message = string.formatters["=[instance: %s]"] -- the = controls the lua error / see: lobject.c
+
local setfenv = setfenv or debug.setfenv -- < 5.2
if setfenv then
@@ -183,7 +185,7 @@ elseif libraries then -- assume >= 5.2
messages[lnn] = message
numbers[name] = lnn
end
- luanames[lnn] = message
+ luanames[lnn] = f_message(message)
context(lnn)
end
@@ -198,7 +200,7 @@ else
messages[lnn] = message
numbers[name] = lnn
end
- luanames[lnn] = message
+ luanames[lnn] = f_message(message)
context(lnn)
end
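
The f_message formatter added to luat-ini.lua compiles a template once and reuses it for every instance name; string.formatters belongs to ConTeXt's Lua libraries, so this sketch substitutes a small closure over string.format to show the same idea outside that setup:

    local format = string.format

    local function formatter(fmt)            -- closure-based stand-in for string.formatters
        return function(...) return format(fmt, ...) end
    end

    local f_message = formatter("=[instance: %s]")

    print(f_message("third party module"))   -- =[instance: third party module]
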
diff --git a/Master/texmf-dist/tex/context/base/luat-ini.mkiv b/Master/texmf-dist/tex/context/base/luat-ini.mkiv
index a3a5903119c..7823ebd5a96 100644
--- a/Master/texmf-dist/tex/context/base/luat-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/luat-ini.mkiv
@@ -120,23 +120,31 @@
\obeyluatokens
\csname\??luacode#1\endcsname}
+% \unexpanded\def\definenamedlua[#1]#2[#3]% no optional arg handling here yet / we could use numbers instead (more efficient)
+% {\ifcsname\??luacode#1\endcsname \else
+% \scratchcounter\ctxlua{lua.registername("#1","#3")}%
+% \normalexpanded{\xdef\csname\??luacode#1\endcsname##1\csname\e!stop#1\v!code\endcsname}%
+% {\noexpand\normalexpanded{\endgroup\noexpand\directlua\the\scratchcounter{_G=protect("#1\s!data")##1}}}%
+% \expandafter\edef\csname\e!start#1\v!code\endcsname {\luat_start_named_lua_code{#1}}%
+% \expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{_G=protect("#1\s!data")##1}}%
+% \fi}
+
\unexpanded\def\definenamedlua[#1]#2[#3]% no optional arg handling here yet / we could use numbers instead (more efficient)
{\ifcsname\??luacode#1\endcsname \else
- \scratchcounter\ctxlua{lua.registername("#1","#3")}%
+ \expandafter\chardef\csname\??luacode:#1\endcsname\ctxlua{lua.registername("#1","#3")}%
\normalexpanded{\xdef\csname\??luacode#1\endcsname##1\csname\e!stop#1\v!code\endcsname}%
- %{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
- {\noexpand\normalexpanded{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}}%
+ {\noexpand\normalexpanded{\endgroup\noexpand\directlua\csname\??luacode:#1\endcsname{_G=protect("#1\s!data")##1}}}%
\expandafter\edef\csname\e!start#1\v!code\endcsname {\luat_start_named_lua_code{#1}}%
- \expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
+ \expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\csname\??luacode:#1\endcsname{_G=protect("#1\s!data")##1}}%
\fi}
%D We predefine a few.
% \definenamedlua[module][module instance] % not needed
-\definenamedlua[user] [private user instance]
-\definenamedlua[third] [third party module instance]
-\definenamedlua[isolated][isolated instance]
+\definenamedlua[user] [private user]
+\definenamedlua[third] [third party module]
+\definenamedlua[isolated][isolated]
%D In practice this works out as follows:
%D
@@ -266,4 +274,53 @@
\def\luat_lua_code
{\normalexpanded{\endgroup\noexpand\directlua\expandafter{\the\scratchtoks}}} % \zerocount is default
+% \startctxfunction MyFunctionA
+% context(" A1 ")
+% \stopctxfunction
+%
+% \startctxfunctiondefinition MyFunctionB
+% context(" B2 ")
+% \stopctxfunctiondefinition
+%
+% \starttext
+% \dorecurse{10000}{\ctxfunction{MyFunctionA}} \page
+% \dorecurse{10000}{\MyFunctionB} \page
+% \dorecurse{10000}{\ctxlua{context(" C3 ")}} \page
+% \stoptext
+
+\installcorenamespace{ctxfunction}
+
+\normalprotected\def\startctxfunctiondefinition #1 %
+ {\begingroup \obeylualines \obeyluatokens \luat_start_lua_function_definition_indeed{#1}}
+
+% \def\luat_start_lua_function_definition_indeed#1#2\stopctxfunctiondefinition
+% {\endgroup\expandafter\edef\csname#1\endcsname{\noexpand\luafunction\ctxcommand{ctxfunction(\!!bs#2\!!es)}\relax}}
+
+\installcorenamespace{luafunction}
+
+\def\luat_start_lua_function_definition_indeed#1#2\stopctxfunctiondefinition
+ {\endgroup
+ \expandafter\chardef\csname\??luafunction#1\endcsname\ctxcommand{ctxfunction(\!!bs#2\!!es)}\relax
+ \expandafter\edef\csname#1\endcsname{\noexpand\luafunction\csname\??luafunction#1\endcsname}}
+
+% \unexpanded\def\setctxluafunction#1#2% experiment
+% {\expandafter\chardef\csname#1\endcsname#2\relax}
+
+\unexpanded\def\setctxluafunction#1#2% experiment
+ {\expandafter\chardef\csname\??luafunction#1\endcsname#2\relax
+ \expandafter\edef\csname#1\endcsname{\noexpand\luafunction\csname\??luafunction#1\endcsname}}
+
+\let\stopctxfunctiondefinition\relax
+
+\normalprotected\def\startctxfunction #1 %
+ {\begingroup \obeylualines \obeyluatokens \luat_start_lua_function_indeed{#1}}
+
+\def\luat_start_lua_function_indeed#1#2\stopctxfunction
+ {\endgroup\expandafter\edef\csname\??ctxfunction#1\endcsname{\noexpand\luafunction\ctxcommand{ctxfunction(\!!bs#2\!!es)}\relax}}
+
+\let\stopctxfunction\relax
+
+\def\ctxfunction#1%
+ {\csname\??ctxfunction#1\endcsname}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/luat-mac.lua b/Master/texmf-dist/tex/context/base/luat-mac.lua
index c8be06b63ef..282dc8ce3d3 100644
--- a/Master/texmf-dist/tex/context/base/luat-mac.lua
+++ b/Master/texmf-dist/tex/context/base/luat-mac.lua
@@ -76,21 +76,23 @@ local function pop()
top = remove(stack)
end
-local leftbrace = P("{") -- will be in patterns
-local rightbrace = P("}")
-local escape = P("\\")
+local leftbrace = P("{") -- will be in patterns
+local rightbrace = P("}")
+local escape = P("\\")
-local space = patterns.space
-local spaces = space^1
-local newline = patterns.newline
-local nobrace = 1 - leftbrace - rightbrace
+local space = patterns.space
+local spaces = space^1
+local newline = patterns.newline
+local nobrace = 1 - leftbrace - rightbrace
local longleft = leftbrace -- P("(")
local longright = rightbrace -- P(")")
local nolong = 1 - longleft - longright
-local name = R("AZ","az")^1
-local csname = (R("AZ","az") + S("@?!_"))^1
+local utf8character = P(1) * R("\128\191")^1 -- unchecked but fast
+
+local name = (R("AZ","az") + utf8character)^1
+local csname = (R("AZ","az") + S("@?!_") + utf8character)^1
local longname = (longleft/"") * (nolong^1) * (longright/"")
local variable = P("#") * Cs(name + longname)
local escapedname = escape * csname
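
luat-mac.lua extends the name and csname patterns with a cheap utf8character pattern: one lead byte followed by continuation bytes in the \128-\191 range, unchecked but fast as the comment says. A quick check with lpeg (the sample strings are made up):

    local lpeg = lpeg or require("lpeg")
    local P, R, C = lpeg.P, lpeg.R, lpeg.C

    local utf8character = P(1) * R("\128\191")^1
    local name          = (R("AZ","az") + utf8character)^1

    print(lpeg.match(C(name), "M\195\188ller"))   -- Müller (ü is the byte pair 195 188)
    print(lpeg.match(C(name), "plain"))           -- plain, as before
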
diff --git a/Master/texmf-dist/tex/context/base/luat-run.lua b/Master/texmf-dist/tex/context/base/luat-run.lua
index eaede103014..607c3528ae7 100644
--- a/Master/texmf-dist/tex/context/base/luat-run.lua
+++ b/Master/texmf-dist/tex/context/base/luat-run.lua
@@ -6,8 +6,8 @@ if not modules then modules = { } end modules ['luat-run'] = {
license = "see context related readme files"
}
-local format = string.format
-local insert = table.insert
+local format, find = string.format, string.find
+local insert, remove = table.insert, table.remove
-- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics
@@ -103,23 +103,25 @@ callbacks.register("pre_dump", pre_dump_actions, "lua related fi
local tempfiles = { }
-function luatex.registertempfile(name,extrasuffix)
+function luatex.registertempfile(name,extrasuffix,keep) -- namespace might change
if extrasuffix then
name = name .. ".mkiv-tmp" -- maybe just .tmp
end
if trace_temp_files and not tempfiles[name] then
report_tempfiles("registering temporary file %a",name)
end
- tempfiles[name] = true
+ tempfiles[name] = keep or false
return name
end
function luatex.cleanuptempfiles()
- for name, _ in next, tempfiles do
- if trace_temp_files then
- report_tempfiles("removing temporary file %a",name)
+ for name, keep in next, tempfiles do
+ if not keep then
+ if trace_temp_files then
+ report_tempfiles("removing temporary file %a",name)
+ end
+ os.remove(name)
end
- os.remove(name)
end
tempfiles = { }
end
@@ -156,3 +158,75 @@ statistics.register("synctex tracing",function()
return "synctex has been enabled (extra log file generated)"
end
end)
+
+-- filenames
+
+local types = {
+ "data",
+ "font map",
+ "image",
+ "font subset",
+ "full font",
+}
+
+local report_open = logs.reporter("open source")
+local report_close = logs.reporter("close source")
+local report_load = logs.reporter("load resource")
+
+local register = callbacks.register
+
+local level = 0
+local total = 0
+local stack = { }
+local all = false
+
+local function report_start(left,name)
+ if not left then
+ -- skip
+ elseif left ~= 1 then
+ if all then
+ report_load("%s > %s",types[left],name or "?")
+ end
+ elseif find(name,"virtual://") then
+ insert(stack,false)
+ else
+ insert(stack,name)
+ total = total + 1
+ level = level + 1
+ report_open("%i > %i > %s",level,total,name or "?")
+ end
+end
+
+local function report_stop(right)
+ if level == 1 or not right or right == 1 then
+ local name = remove(stack)
+ if name then
+ report_close("%i > %i > %s",level,total,name or "?")
+ level = level - 1
+ end
+ end
+end
+
+local function report_none()
+end
+
+register("start_file",report_start)
+register("stop_file", report_stop)
+
+directives.register("system.reportfiles", function(v)
+ if v == "noresources" then
+ all = false
+ register("start_file",report_start)
+ register("stop_file", report_stop)
+ elseif toboolean(v) or v == "all" then
+ all = true
+ register("start_file",report_start)
+ register("stop_file", report_stop)
+ elseif v == "traditional" then
+ register("start_file",nil)
+ register("stop_file", nil)
+ else
+ register("start_file",report_none)
+ register("stop_file", report_none)
+ end
+end)
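
The start_file/stop_file callbacks added to luat-run.lua keep a stack of open source files and report the nesting level plus a running total. A simplified standalone sketch of that bookkeeping (it only handles the level-1 case and uses print instead of the reporters):

    local insert, remove = table.insert, table.remove

    local level, total, stack = 0, 0, { }

    local function report_start(left, name)
        if left == 1 then
            insert(stack, name)
            total = total + 1
            level = level + 1
            print(("open  %i > %i > %s"):format(level, total, name))
        end
    end

    local function report_stop(right)
        if level >= 1 and (not right or right == 1) then
            local name = remove(stack)
            if name then
                print(("close %i > %i > %s"):format(level, total, name))
                level = level - 1
            end
        end
    end

    report_start(1, "main.tex")
    report_start(1, "chapter-one.tex")
    report_stop(1)
    report_stop(1)
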
diff --git a/Master/texmf-dist/tex/context/base/luat-sto.lua b/Master/texmf-dist/tex/context/base/luat-sto.lua
index 7a11b7f5e68..b04d655c29d 100644
--- a/Master/texmf-dist/tex/context/base/luat-sto.lua
+++ b/Master/texmf-dist/tex/context/base/luat-sto.lua
@@ -13,6 +13,7 @@ local gmatch, format = string.gmatch, string.format
local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
local bytecode = lua.bytecode
local strippedloadstring = utilities.lua.strippedloadstring
+local formatters = string.formatters
local trace_storage = false
local report_storage = logs.reporter("system","storage")
@@ -48,38 +49,71 @@ function storage.register(...)
return t
end
-local n = 0
-local function dump()
- local max = storage.max
- for i=1,#data do
- local d = data[i]
- local message, original, target = d[1], d[2] ,d[3]
- local c, code, name = 0, { }, nil
- -- we have a nice definer for this
- for str in gmatch(target,"([^%.]+)") do
- if name then
- name = name .. "." .. str
+local n = 0 -- is that one used ?
+
+if environment.initex then
+
+ -- local function dump()
+ -- local max = storage.max
+ -- for i=1,#data do
+ -- local d = data[i]
+ -- local message, original, target = d[1], d[2] ,d[3]
+ -- local c, code, name = 0, { }, nil
+ -- -- we have a nice definer for this
+ -- for str in gmatch(target,"([^%.]+)") do
+ -- if name then
+ -- name = name .. "." .. str
+ -- else
+ -- name = str
+ -- end
+ -- c = c + 1 ; code[c] = formatters["%s = %s or { }"](name,name)
+ -- end
+ -- max = max + 1
+ -- if trace_storage then
+ -- c = c + 1 ; code[c] = formatters["print('restoring %s from slot %s')"](message,max)
+ -- end
+ -- c = c + 1 ; code[c] = serialize(original,name)
+ -- if trace_storage then
+ -- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ -- end
+ -- -- we don't need tracing in such tables
+ -- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
+ -- collectgarbage("step")
+ -- end
+ -- storage.max = max
+ -- end
+
+ local function dump()
+ local max = storage.max
+ local strip = storage.strip
+ for i=1,#data do
+ max = max + 1
+ local tabledata = data[i]
+ local message = tabledata[1]
+ local original = tabledata[2]
+ local target = tabledata[3]
+ local definition = utilities.tables.definetable(target,false,true)
+ local comment = formatters["restoring %s from slot %s"](message,max)
+ if trace_storage then
+ comment = formatters["print('%s')"](comment)
else
- name = str
+ comment = formatters["-- %s"](comment)
end
- c = c + 1 ; code[c] = format("%s = %s or { }",name,name)
- end
- max = max + 1
- if trace_storage then
- c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max)
- end
- c = c + 1 ; code[c] = serialize(original,name)
- if trace_storage then
- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ local dumped = serialize(original,target)
+ if trace_storage then
+ report_storage('saving %a in slot %a, size %s',message,max,#dumped)
+ end
+ -- we don't need tracing in such tables
+ dumped = concat({ definition, comment, dumped },"\n")
+ bytecode[max] = strippedloadstring(dumped,strip,formatters["slot %s (%s)"](max,name))
+ collectgarbage("step")
end
- -- we don't need tracing in such tables
- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
- collectgarbage("step")
+ storage.max = max
end
- storage.max = max
-end
-lua.registerfinalizer(dump,"dump storage")
+ lua.registerfinalizer(dump,"dump storage")
+
+end
-- to be tested with otf caching:
@@ -115,31 +149,14 @@ statistics.register("stored bytecode data", function()
local tofmodules = storage.tofmodules or 0
local tofdumps = storage.toftables or 0
if environment.initex then
- local luautilities = utilities.lua
- local nofstrippedbytes = luautilities.nofstrippedbytes
- local nofstrippedchunks = luautilities.nofstrippedchunks
- if nofstrippedbytes > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks,
- nofstrippedbytes
- )
- elseif nofstrippedchunks > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks
- )
- else
- return format("%s modules, %s tables, %s chunks",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps
- )
- end
+ local luautilities = utilities.lua
+ return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps,
+ luautilities.nofstrippedchunks or 0,
+ luautilities.nofstrippedbytes or 0
+ )
else
return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)",
nofmodules, tofmodules,
@@ -163,6 +180,7 @@ storage.register("storage/shared", storage.shared, "storage.shared")
local mark = storage.mark
if string.patterns then mark(string.patterns) end
+if string.formatters then mark(string.formatters) end
if lpeg.patterns then mark(lpeg.patterns) end
if os.env then mark(os.env) end
if number.dimenfactors then mark(number.dimenfactors) end
diff --git a/Master/texmf-dist/tex/context/base/lxml-css.lua b/Master/texmf-dist/tex/context/base/lxml-css.lua
index c5a85c2bd6f..0deaea4d391 100644
--- a/Master/texmf-dist/tex/context/base/lxml-css.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-css.lua
@@ -30,8 +30,9 @@ if tex then
local exheights = fonts.hashes.exheights
local emwidths = fonts.hashes.emwidths
+ local texget = tex.get
- percentage = function(s,pcf) return tonumber(s) * (pcf or tex.hsize) end
+ percentage = function(s,pcf) return tonumber(s) * (pcf or texget("hsize")) end
exheight = function(s,exf) return tonumber(s) * (exf or exheights[true]) end
emwidth = function(s,emf) return tonumber(s) * (emf or emwidths[true]) end
pixels = function(s,pxf) return tonumber(s) * (pxf or emwidths[true]/300) end
@@ -109,17 +110,17 @@ css.padding = padding
-- print(padding("0",pixel,hsize,exheight,emwidth))
--- local currentfont = font.current
--- local texdimen = tex.dimen
--- local hashes = fonts.hashes
--- local quads = hashes.quads
--- local xheights = hashes.xheights
+-- local currentfont = font.current
+-- local texget = tex.get
+-- local hashes = fonts.hashes
+-- local quads = hashes.quads
+-- local xheights = hashes.xheights
--
-- local function padding(str)
-- local font = currentfont()
-- local exheight = xheights[font]
-- local emwidth = quads[font]
--- local hsize = texdimen.hsize/100
+-- local hsize = texget("hsize")/100
-- local pixel = emwidth/100
-- return padding(str,pixel,hsize,exheight,emwidth)
-- end
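
lxml-css.lua now reads hsize through texget("hsize"); the helpers themselves remain simple multipliers over the current hsize, ex height and em width. A standalone sketch with fixed stand-in values (the numbers below are illustrative scaled points, not real font metrics):

    local hsize    = tex and tex.get and tex.get("hsize") or 28417325
    local exheight = 282168     -- stand-in for exheights[true]
    local emwidth  = 655360     -- stand-in for emwidths[true]

    local function percentage(s, pcf) return tonumber(s) * (pcf or hsize)       end
    local function exh(s, exf)        return tonumber(s) * (exf or exheight)    end
    local function emw(s, emf)        return tonumber(s) * (emf or emwidth)     end
    local function pixels(s, pxf)     return tonumber(s) * (pxf or emwidth/300) end

    print(percentage("0.5"), pixels("300"))   -- half the hsize and one em, in sp
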
diff --git a/Master/texmf-dist/tex/context/base/lxml-ctx.lua b/Master/texmf-dist/tex/context/base/lxml-ctx.lua
index 968dbda7174..1191d6796a9 100644
--- a/Master/texmf-dist/tex/context/base/lxml-ctx.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-ctx.lua
@@ -10,11 +10,13 @@ if not modules then modules = { } end modules ['lxml-ctx'] = {
local format, find = string.format, string.find
-local xml = xml
-
+local xml = xml
xml.ctx = { }
xml.ctx.enhancers = { }
+local context = context
+local commands = commands
+
-- hashen
function xml.ctx.enhancers.compound(root,lpath,before,tokens,after) -- todo lpeg
diff --git a/Master/texmf-dist/tex/context/base/lxml-dir.lua b/Master/texmf-dist/tex/context/base/lxml-dir.lua
index 3c68664aefe..48c0ac41e73 100644
--- a/Master/texmf-dist/tex/context/base/lxml-dir.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-dir.lua
@@ -24,12 +24,13 @@ local formatters = string.formatters
--
--
-local lxml, context = lxml, context
+local lxml = lxml
+local context = context
-local getid = lxml.getid
+local getid = lxml.getid
-lxml.directives = lxml.directives or { }
-local directives = lxml.directives
+local directives = lxml.directives or { }
+lxml.directives = directives
local report_lxml = logs.reporter("xml","tex")
@@ -106,9 +107,11 @@ directives.handle = handle_setup
function directives.setup(root,attribute,element)
handle_setup('setup',root,attribute,element)
end
+
function directives.before(root,attribute,element)
handle_setup('before',root,attribute,element)
end
+
function directives.after(root,attribute,element)
handle_setup('after',root,attribute,element)
end
diff --git a/Master/texmf-dist/tex/context/base/lxml-ini.mkiv b/Master/texmf-dist/tex/context/base/lxml-ini.mkiv
index d2d64aa8d52..239fe4ac0c3 100644
--- a/Master/texmf-dist/tex/context/base/lxml-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/lxml-ini.mkiv
@@ -58,6 +58,7 @@
\def\xmldirect #1{\ctxlxml{direct("#1")}} % in loops, not dt but root
\def\xmlidx #1#2#3{\ctxlxml{idx("#1","#2",\number#3)}}
\def\xmlinclude #1#2#3{\ctxlxml{include("#1","#2","#3",true)}}
+\def\xmlsave #1#2{\ctxlxml{save("#1","#2")}}
\def\xmlindex #1#2#3{\ctxlxml{index("#1","#2",\number#3)}}
\def\xmlinfo #1{\hbox{\ttxx[\ctxlxml{info("#1")}]}}
\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
@@ -87,10 +88,10 @@
\def\xmldisplayverbatim #1{\ctxlxml{displayverbatim("#1")}}
\def\xmlinlineverbatim #1{\ctxlxml{inlineverbatim("#1")}}
-\def\xmlload #1#2{\ctxlxml{load("#1","#2","\p_lxml_entities","\p_lxml_compress")}}
-\def\xmlloadbuffer #1#2{\ctxlxml{loadbuffer("#1","#2","\p_lxml_entities","\p_lxml_compress")}}
-\def\xmlloaddata #1#2{\ctxlxml{loaddata("#1",\!!bs#2\!!es,"\p_lxml_entities","\p_lxml_compress")}}
-\def\xmlloadregistered #1#2{\ctxlxml{loadregistered("#1","\p_lxml_entities","\p_lxml_compress")}}
+\def\xmlload #1#2{\ctxlxml{load("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
+\def\xmlloadbuffer #1#2{\ctxlxml{loadbuffer("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
+\def\xmlloaddata #1#2{\ctxlxml{loaddata("#1",\!!bs#2\!!es,"\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
+\def\xmlloadregistered #1#2{\ctxlxml{loadregistered("#1","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
\def\xmlloaddirectives #1{\ctxlxml{directives.load("any:///#1")}}
\def\xmlpos #1{\ctxlxml{pos("#1")}}
@@ -124,9 +125,10 @@
\let\xmlgrab\xmlsetsetup % obsolete
\let\xmlself\s!unknown % obsolete
-\def\xmlsetup#1#2{\setupwithargument{#2}{#1}}
+%\def\xmlsetup#1#2{\setupwithargument{#2}{#1}}
+\let\xmlsetup\setupwithargumentswapped
-\let\xmls\xmlsetup
+\let\xmls\setupwithargumentswapped
\let\xmlw\setupwithargument
\newtoks \registeredxmlsetups
@@ -313,17 +315,6 @@
\xmlprocessingmode\executeifdefined{\??xmldefaults\directxmlparameter\c!default}\plusone
\to \everysetupxml
-\unexpanded\def\initializexmlprocessing % is this still needed?
- {\the\everysetupxml}
-
-\let\p_lxml_entities\empty
-\let\p_lxml_compress\empty
-
-\appendtoks
- \edef\p_lxml_entities{\directxmlparameter\c!entities}%
- \edef\p_lxml_compress{\directxmlparameter\c!compress}%
-\to \everysetupxml
-
\setupxml
[\c!default=, % flush all
\c!compress=\v!no, % strip comment
diff --git a/Master/texmf-dist/tex/context/base/lxml-lpt.lua b/Master/texmf-dist/tex/context/base/lxml-lpt.lua
index 51ab321b931..8567f26234a 100644
--- a/Master/texmf-dist/tex/context/base/lxml-lpt.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-lpt.lua
@@ -1039,37 +1039,6 @@ local function normal_apply(list,parsed,nofparsed,order)
return collected
end
---~ local function applylpath(list,pattern)
---~ -- we avoid an extra call
---~ local parsed = cache[pattern]
---~ if parsed then
---~ lpathcalls = lpathcalls + 1
---~ lpathcached = lpathcached + 1
---~ elseif type(pattern) == "table" then
---~ lpathcalls = lpathcalls + 1
---~ parsed = pattern
---~ else
---~ parsed = lpath(pattern) or pattern
---~ end
---~ if not parsed then
---~ return
---~ end
---~ local nofparsed = #parsed
---~ if nofparsed == 0 then
---~ return -- something is wrong
---~ end
---~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... todo
---~ if not one then
---~ return -- something is wrong
---~ elseif not trace_lpath then
---~ return normal_apply(list,parsed,nofparsed,one.mi)
---~ elseif trace_lprofile then
---~ return profiled_apply(list,parsed,nofparsed,one.mi)
---~ else
---~ return traced_apply(list,parsed,nofparsed,one.mi)
---~ end
---~ end
-
local function applylpath(list,pattern)
if not list then
return
@@ -1384,8 +1353,13 @@ function xml.elements(root,pattern,reverse) -- r, d, k
local collected = applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c = #collected + 1
+ end
+ local n = #collected
+ if n == 0 then
+ return dummy
+ end
+ if reverse then
+ local c = n + 1
return function()
if c > 1 then
c = c - 1
@@ -1395,7 +1369,7 @@ function xml.elements(root,pattern,reverse) -- r, d, k
end
end
else
- local n, c = #collected, 0
+ local c = 0
return function()
if c < n then
c = c + 1
@@ -1411,8 +1385,13 @@ function xml.collected(root,pattern,reverse) -- e
local collected = applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c = #collected + 1
+ end
+ local n = #collected
+ if n == 0 then
+ return dummy
+ end
+ if reverse then
+ local c = n + 1
return function()
if c > 1 then
c = c - 1
@@ -1420,7 +1399,7 @@ function xml.collected(root,pattern,reverse) -- e
end
end
else
- local n, c = #collected, 0
+ local c = 0
return function()
if c < n then
c = c + 1
@@ -1441,7 +1420,7 @@ end
-- texy (see xfdf):
-local function split(e)
+local function split(e) -- todo: use helpers / lpeg
local dt = e.dt
if dt then
for i=1,#dt do
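
The lxml-lpt.lua change makes xml.elements and xml.collected return the dummy iterator for empty collections as well as for missing ones, and hoists #collected out of the closures. A reduced sketch of that iterator shape:

    local function dummy() end

    local function iterate(collected, reverse)
        if not collected then return dummy end
        local n = #collected
        if n == 0 then return dummy end
        if reverse then
            local c = n + 1
            return function()
                if c > 1 then
                    c = c - 1
                    return collected[c]
                end
            end
        else
            local c = 0
            return function()
                if c < n then
                    c = c + 1
                    return collected[c]
                end
            end
        end
    end

    for e in iterate({ "a", "b", "c" }, true) do io.write(e, " ") end   -- c b a
    print()
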
diff --git a/Master/texmf-dist/tex/context/base/lxml-sor.lua b/Master/texmf-dist/tex/context/base/lxml-sor.lua
index 951017bcd3f..aba1c3b8d3f 100644
--- a/Master/texmf-dist/tex/context/base/lxml-sor.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-sor.lua
@@ -9,9 +9,12 @@ if not modules then modules = { } end modules ['lxml-sor'] = {
local format, concat, rep = string.format, table.concat, string.rep
local lpegmatch = lpeg.match
-local xml, lxml = xml, lxml
+local xml = xml
+local lxml = lxml
+local context = context
-lxml.sorters = lxml.sorters or { }
+local lxmlsorters = lxml.sorters or { }
+lxml.sorters = lxmlsorters
if not lxml.splitid then
local splitter = lpeg.C((1-lpeg.P(":"))^1) * lpeg.P("::") * lpeg.C(lpeg.P(1)^1)
@@ -27,7 +30,7 @@ end
local lists = { }
-function lxml.sorters.reset(name)
+function lxmlsorters.reset(name)
lists[name] = {
sorted = false,
entries = { },
@@ -36,7 +39,7 @@ function lxml.sorters.reset(name)
}
end
-function lxml.sorters.add(name,n,key)
+function lxmlsorters.add(name,n,key)
local list = lists[name]
if list.sorted then
-- reverse is messed up, we could regenerate it and go on
@@ -56,7 +59,7 @@ function lxml.sorters.add(name,n,key)
end
end
-function lxml.sorters.show(name)
+function lxmlsorters.show(name)
local list = lists[name]
local entries = list and list.entries
local NC, NR, bold = context.NC, context.NR, context.bold -- somehow bold is not working
@@ -92,9 +95,9 @@ function lxml.sorters.show(name)
end
end
-lxml.sorters.compare = sorters.comparers.basic -- (a,b)
+lxmlsorters.compare = sorters.comparers.basic -- (a,b)
-function lxml.sorters.sort(name)
+function lxmlsorters.sort(name)
local list = lists[name]
local entries = list and list.entries
if entries then
@@ -117,7 +120,7 @@ function lxml.sorters.sort(name)
r.split = splitter(strip(r.key))
end
-- sorting
- sorters.sort(results,lxml.sorters.compare)
+ sorters.sort(results,lxmlsorters.compare)
-- finalizing
list.nofsorted = #results
local split = { }
@@ -137,7 +140,7 @@ function lxml.sorters.sort(name)
end
end
-function lxml.sorters.flush(name,setup)
+function lxmlsorters.flush(name,setup)
local list = lists[name]
local results = list and list.results
local xmlw = context.xmlw
diff --git a/Master/texmf-dist/tex/context/base/lxml-tab.lua b/Master/texmf-dist/tex/context/base/lxml-tab.lua
index 2bb5844fcb0..8b34a96a3c3 100644
--- a/Master/texmf-dist/tex/context/base/lxml-tab.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-tab.lua
@@ -34,6 +34,8 @@ as the current variant was written when showed up and it's easier
build tables in one go.
--ldx]]--
+if lpeg.setmaxstack then lpeg.setmaxstack(1000) end -- deeply nested xml files
+
xml = xml or { }
local xml = xml
@@ -627,7 +629,6 @@ local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * val
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -745,8 +746,11 @@ local function _xmlconvert_(data, settings)
end
if errorstr and errorstr ~= "" then
result.error = true
+ else
+ errorstr = nil
end
result.statistics = {
+ errormessage = errorstr,
entities = {
decimals = dcache,
hexadecimals = hcache,
@@ -1015,25 +1019,27 @@ local function verbose_document(e,handlers)
end
local function serialize(e,handlers,...)
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
+ if e then
+ local initialize = handlers.initialize
+ local finalize = handlers.finalize
+ local functions = handlers.functions
+ if initialize then
+ local state = initialize(...)
+ if not state == true then
+ return state
+ end
+ end
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers) -- dc ?
+ end
+ if finalize then
+ return finalize()
end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
end
end
diff --git a/Master/texmf-dist/tex/context/base/lxml-tex.lua b/Master/texmf-dist/tex/context/base/lxml-tex.lua
index 112f627511f..0503c511c45 100644
--- a/Master/texmf-dist/tex/context/base/lxml-tex.lua
+++ b/Master/texmf-dist/tex/context/base/lxml-tex.lua
@@ -27,6 +27,7 @@ local catcodenumbers = catcodes.numbers
local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
+local commands = commands
local context = context
local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
@@ -35,6 +36,7 @@ local xmlwithelements = xml.withelements
local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring
local xmlapplylpath = xml.applylpath
local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml.privatetoken, xml.privatecodes
+local xmlstripelement = xml.stripelement
local variables = (interfaces and interfaces.variables) or { }
@@ -456,6 +458,10 @@ function lxml.include(id,pattern,attribute,recurse)
stoptiming(xml)
end
+function lxml.save(id,name)
+ xml.save(getid(id),name)
+end
+
function xml.getbuffer(name,compress,entities) -- we need to make sure that commands are processed
if not name or name == "" then
name = tex.jobname
@@ -914,16 +920,18 @@ function lxml.setsetup(id,pattern,setup)
end
end
end
+ elseif setup == "-" then
+ for c=1,nc do
+ collected[c].command = false
+ end
+ elseif setup == "+" then
+ for c=1,nc do
+ collected[c].command = true
+ end
else
for c=1,nc do
local e = collected[c]
- if setup == "-" then
- e.command = false
- elseif setup == "+" then
- e.command = true
- else
- e.command = e.tg
- end
+ e.command = e.tg
end
end
elseif trace_setups then
@@ -966,16 +974,18 @@ function lxml.setsetup(id,pattern,setup)
end
end
end
+ elseif b == "-" then
+ for c=1,nc do
+ collected[c].command = false
+ end
+ elseif b == "+" then
+ for c=1,nc do
+ collected[c].command = true
+ end
else
for c=1,nc do
local e = collected[c]
- if b == "-" then
- e.command = false
- elseif b == "+" then
- e.command = true
- else
- e.command = a .. e.tg
- end
+ e.command = a .. e.tg
end
end
elseif trace_setups then
@@ -1185,7 +1195,7 @@ local function stripped(collected) -- tricky as we strip in place
local nc = #collected
if nc > 0 then
for c=1,nc do
- cprint(xml.stripelement(collected[c]))
+ cprint(xmlstripelement(collected[c]))
end
end
end
@@ -1310,10 +1320,11 @@ function texfinalizers.name(collected,n)
c = collected[nc-n+1]
end
if c then
- if c.ns == "" then
+ local ns = c.ns
+ if not ns or ns == "" then
contextsprint(ctxcatcodes,c.tg)
else
- contextsprint(ctxcatcodes,c.ns,":",c.tg)
+ contextsprint(ctxcatcodes,ns,":",c.tg)
end
end
end
@@ -1326,11 +1337,11 @@ function texfinalizers.tags(collected,nonamespace)
if nc > 0 then
for c=1,nc do
local e = collected[c]
- local ns, tg = e.ns, e.tg
- if nonamespace or ns == "" then
- contextsprint(ctxcatcodes,tg)
+ local ns = e.ns
+ if nonamespace or (not ns or ns == "") then
+ contextsprint(ctxcatcodes,e.tg)
else
- contextsprint(ctxcatcodes,ns,":",tg)
+ contextsprint(ctxcatcodes,ns,":",e.tg)
end
end
end
@@ -1340,11 +1351,10 @@ end
--
local function verbatim(id,before,after)
- local root = getid(id)
- if root then
- if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end
- lxml.toverbatim(xmltostring(root.dt))
---~ lxml.toverbatim(xml.totext(root.dt))
+ local e = getid(id)
+ if e then
+ if before then contextsprint(ctxcatcodes,before,"[",e.tg or "?","]") end
+ lxml.toverbatim(xmltostring(e.dt)) -- lxml.toverbatim(xml.totext(e.dt))
if after then contextsprint(ctxcatcodes,after) end
end
end
@@ -1450,66 +1460,112 @@ end
lxml.index = lxml.position
function lxml.pos(id)
- local root = getid(id)
- contextsprint(ctxcatcodes,(root and root.ni) or 0)
-end
+ local e = getid(id)
+ contextsprint(ctxcatcodes,e and e.ni or 0)
+end
+
+-- function lxml.att(id,a,default)
+-- local root = getid(id)
+-- if root then
+-- local at = root.at
+-- local str = (at and at[a]) or default
+-- if str and str ~= "" then
+-- contextsprint(notcatcodes,str)
+-- end
+-- elseif default then
+-- contextsprint(notcatcodes,default)
+-- end
+-- end
+--
+-- no need for an assignment so:
function lxml.att(id,a,default)
- local root = getid(id)
- if root then
- local at = root.at
- local str = (at and at[a]) or default
- if str and str ~= "" then
- contextsprint(notcatcodes,str)
+ local e = getid(id)
+ if e then
+ local at = e.at
+ if at then
+ -- normally always true
+ local str = at[a]
+ if not str then
+ if default and default ~= "" then
+ contextsprint(notcatcodes,default)
+ end
+ elseif str ~= "" then
+ contextsprint(notcatcodes,str)
+ end
+ elseif default and default ~= "" then
+ contextsprint(notcatcodes,default)
end
- elseif default then
+ elseif default and default ~= "" then
contextsprint(notcatcodes,default)
end
end
function lxml.name(id) -- or remapped name? -> lxml.info, combine
- local r = getid(id)
- local ns = r.rn or r.ns or ""
- if ns ~= "" then
- contextsprint(ctxcatcodes,ns,":",r.tg)
- else
- contextsprint(ctxcatcodes,r.tg)
+ local e = getid(id)
+ if e then
+ local ns = e.rn or e.ns
+ if ns and ns ~= "" then
+ contextsprint(ctxcatcodes,ns,":",e.tg)
+ else
+ contextsprint(ctxcatcodes,e.tg)
+ end
end
end
function lxml.match(id) -- or remapped name? -> lxml.info, combine
- contextsprint(ctxcatcodes,getid(id).mi or 0)
+ local e = getid(id)
+ contextsprint(ctxcatcodes,e and e.mi or 0)
end
function lxml.tag(id) -- tag vs name -> also in l-xml tag->name
- contextsprint(ctxcatcodes,getid(id).tg or "")
+ local e = getid(id)
+ if e then
+ local tg = e.tg
+ if tg and tg ~= "" then
+ contextsprint(ctxcatcodes,tg)
+ end
+ end
end
function lxml.namespace(id) -- or remapped name?
- local root = getid(id)
- contextsprint(ctxcatcodes,root.rn or root.ns or "")
+ local e = getid(id)
+ if e then
+ local ns = e.rn or e.ns
+ if ns and ns ~= "" then
+ contextsprint(ctxcatcodes,ns)
+ end
+ end
end
function lxml.flush(id)
- id = getid(id)
- local dt = id and id.dt
- if dt then
- xmlsprint(dt)
+ local e = getid(id)
+ if e then
+ local dt = e.dt
+ if dt then
+ xmlsprint(dt)
+ end
end
end
function lxml.snippet(id,i)
local e = getid(id)
if e then
- local edt = e.dt
- if edt then
- xmlsprint(edt[i])
+ local dt = e.dt
+ if dt then
+ local dti = dt[i]
+ if dti then
+ xmlsprint(dti)
+ end
end
end
end
function lxml.direct(id)
- xmlsprint(getid(id))
+ local e = getid(id)
+ if e then
+ xmlsprint(e)
+ end
end
function lxml.command(id,pattern,cmd)
@@ -1574,12 +1630,17 @@ statistics.register("xml load time", function()
end)
statistics.register("lxml preparation time", function()
- local calls, cached = xml.lpathcalls(), xml.lpathcached()
- if calls > 0 or cached > 0 then
- return format("%s seconds, %s nodes, %s lpath calls, %s cached calls",
- statistics.elapsedtime(lxml), nofindices, calls, cached)
+ if noffiles > 0 or nofconverted > 0 then
+ local calls = xml.lpathcalls()
+ local cached = xml.lpathcached()
+ if calls > 0 or cached > 0 then
+ return format("%s seconds, %s nodes, %s lpath calls, %s cached calls",
+ statistics.elapsedtime(lxml), nofindices, calls, cached)
+ else
+ return nil
+ end
else
- return nil
+ -- pretty close to zero so not worth mentioning
end
end)
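
The rewritten lxml.att avoids the intermediate assignment and only falls back to the default when it is non-empty; when neither the attribute nor the default yields text, nothing is flushed. A reduced sketch of just that decision logic, with return values in place of contextsprint:

    local function att(e, a, default)
        local at  = e and e.at
        local str = at and at[a]
        if str and str ~= "" then
            return str
        elseif default and default ~= "" then
            return default
        end
    end

    print(att({ at = { class = "x" } }, "class", "y"))        -- x
    print(att({ at = { } },             "class", "y"))        -- y
    print(att(nil,                      "class", "") == nil)  -- true: nothing to flush
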
diff --git a/Master/texmf-dist/tex/context/base/m-chart.lua b/Master/texmf-dist/tex/context/base/m-chart.lua
index c4da2eb6378..2b9869379f1 100644
--- a/Master/texmf-dist/tex/context/base/m-chart.lua
+++ b/Master/texmf-dist/tex/context/base/m-chart.lua
@@ -19,7 +19,7 @@ local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
local report_chart = logs.reporter("chart")
-local points = number.points
+local points = number.points -- we can use %p instead
local variables = interfaces.variables
@@ -499,7 +499,7 @@ local function process_cells(chart,xoffset,yoffset)
local linesettings = settings.line
context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color)
context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor)
- context("flow_shape_line_width := %s ; ", points(linesettingsrulethickness))
+ context("flow_shape_line_width := %s ; ", points(linesettings.rulethickness))
elseif focus[cell.focus] or focus[cell.name] then
local focussettings = settings.focus
context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor)
@@ -580,7 +580,7 @@ local function process_connections(chart,xoffset,yoffset)
context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false")
context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0)
context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color)
- context("flow_connection_line_width := 2pt ;",points(linesettings.rulethickness))
+ context("flow_connection_line_width := %s ;",points(linesettings.rulethickness))
context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other)
context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;")
end
diff --git a/Master/texmf-dist/tex/context/base/m-database.lua b/Master/texmf-dist/tex/context/base/m-database.lua
index 47854daa023..91e9636eeb0 100644
--- a/Master/texmf-dist/tex/context/base/m-database.lua
+++ b/Master/texmf-dist/tex/context/base/m-database.lua
@@ -6,20 +6,22 @@ if not modules then modules = { } end modules ['m-database'] = {
license = "see context related readme files"
}
-local sub, gmatch, format = string.sub, string.gmatch, string.format
+local sub, gmatch = string.sub, string.gmatch
local concat = table.concat
local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat
-local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct
+local lpegP, lpegC, lpegS, lpegCt, lpegCc, lpegCs = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct, lpeg.Cc, lpeg.Cs
local stripstring = string.strip
+moduledata.database = moduledata.database or { }
+moduledata.database.csv = moduledata.database.csv or { }
+
-- One also needs to enable context.trace, here we only plug in some code (maybe
-- some day this tracker will also toggle the main context tracer.
-local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end)
-
+local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end)
local report_database = logs.reporter("database")
-buffers.database = buffers.database or { }
+local context = context
local l_tab = lpegpatterns.tab
local l_space = lpegpatterns.space
@@ -36,7 +38,7 @@ local separators = { -- not interfaced
spaces = l_space^1,
}
-function buffers.database.process(settings)
+function moduledata.database.csv.process(settings)
local data
if settings.type == "file" then
local filename = resolvers.finders.byscheme("any",settings.database)
@@ -46,6 +48,8 @@ function buffers.database.process(settings)
data = buffers.getlines(settings.database)
end
if data and #data > 0 then
+ local catcodes = tonumber(settings.catcodes) or tex.catcodetable
+ context.pushcatcodes(catcodes)
if trace_flush then
context.pushlogger(report_database)
end
@@ -55,7 +59,7 @@ function buffers.database.process(settings)
local left, right = settings.left or "", settings.right or ""
local setups = settings.setups or ""
local strip = settings.strip == v_yes or false
- local command = settings.command
+ local command = settings.command or ""
separatorchar = (not separatorchar and ",") or separators[separatorchar] or separatorchar
local separator = type(separatorchar) == "string" and lpegS(separatorchar) or separatorchar
local whatever = lpegC((1 - separator)^0)
@@ -63,7 +67,7 @@ function buffers.database.process(settings)
local quotedata = nil
for chr in gmatch(quotechar,".") do
local quotechar = lpegP(chr)
- local quoteword = l_space^0 * quotechar * lpegC((1 - quotechar)^0) * quotechar * l_space^0
+ local quoteword = lpegCs(((l_space^0 * quotechar)/"") * (1 - quotechar)^0 * ((quotechar * l_space^0)/""))
if quotedata then
quotedata = quotedata + quoteword
else
@@ -73,12 +77,34 @@ function buffers.database.process(settings)
whatever = quotedata + whatever
end
local checker = commentchar ~= "" and lpegS(commentchar)
- local splitter = lpegCt(whatever * (separator * whatever)^0)
+ if strip then
+ whatever = whatever / stripstring
+ end
+ if left ~= "" then
+ whatever = lpegCc(left) * whatever
+ end
+ if right ~= "" then
+ whatever = whatever * lpegCc(right)
+ end
+ if command ~= "" then
+ whatever = lpegCc("{") * whatever * lpegCc("}")
+ end
+ whatever = whatever * (separator/"" * whatever)^0
+ if first ~= "" then
+ whatever = lpegCc(first) * whatever
+ end
+ if last ~= "" then
+ whatever = whatever * lpegCc(last)
+ end
+ if command ~= "" then
+ whatever = lpegCs(lpegCc(command) * whatever)
+ else
+ whatever = lpegCs(whatever)
+ end
local found = false
for i=1,#data do
local line = data[i]
if not lpegmatch(l_empty,line) and (not checker or not lpegmatch(checker,line)) then
- local list = lpegmatch(splitter,line)
if not found then
if setups ~= "" then
context.begingroup()
@@ -87,39 +113,7 @@ function buffers.database.process(settings)
context(before)
found = true
end
- if trace_flush then
- local result, r = { }, 0
- r = r + 1 ; result[r] = first
- for j=1,#list do
- local str = strip and stripstring(list[j]) or list[j]
- r = r + 1 ; result[r] = left
- if command == "" then
- r = r + 1 ; result[r] = str
- else
- r = r + 1 ; result[r] = command
- r = r + 1 ; result[r] = "{"
- r = r + 1 ; result[r] = str
- r = r + 1 ; result[r] = "}"
- end
- r = r + 1 ; result[r] = right
- end
- r = r + 1 ; result[r] = last
- context(concat(result))
- else
- context(first)
- for j=1,#list do
- local str = strip and stripstring(list[j]) or list[j]
- context(left)
- if command == "" then
- context(str)
- else
- context(command)
- context(false,str)
- end
- context(right)
- end
- context(last)
- end
+ context(lpegmatch(whatever,line))
end
end
if found then
@@ -128,6 +122,7 @@ function buffers.database.process(settings)
context.endgroup()
end
end
+ context.popcatcodes()
if trace_flush then
context.poplogger()
end
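
In m-database.lua the per-line formatting moves from table building into the splitter itself: constant captures supply the left/right/first/last/command wrappers and a substitution capture flattens the whole line to one string. A much reduced sketch of that construction (the separator and wrappers here are arbitrary):

    local lpeg = lpeg or require("lpeg")
    local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs

    local separator = P(",")
    local whatever  = C((1 - separator)^0)

    whatever = Cc("(") * whatever * Cc(")")              -- left/right wrappers per field
    whatever = whatever * (separator/"" * whatever)^0    -- repeated for every field
    whatever = Cs(Cc("[") * whatever * Cc("]"))          -- first/last, flattened by Cs

    print(lpeg.match(whatever, "1,2,3"))                 -- [(1)(2)(3)]
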
diff --git a/Master/texmf-dist/tex/context/base/m-database.mkiv b/Master/texmf-dist/tex/context/base/m-database.mkiv
index 0285d3bcd1b..cc7dd3d7207 100644
--- a/Master/texmf-dist/tex/context/base/m-database.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-database.mkiv
@@ -52,7 +52,7 @@
\let\currentdatabasename\currentdatabase
\let\currentdatabase\empty
\fi
- \ctxlua{buffers.database.process {
+ \ctxlua{moduledata.database.csv.process {
name = "\currentdatabase",
type = "\currentdatabasetype",
database = "\currentdatabasename",
@@ -68,6 +68,7 @@
left = \!!bs\databaseparameter\c!left \!!es,
right = \!!bs\databaseparameter\c!right \!!es,
command = \!!bs\databaseparameter\c!command \!!es,
+ catcodes = \number\catcodetable
}}}
\unexpanded\def\processdatabasebuffer{\dodoubleempty\module_database_process_buffer}
@@ -120,8 +121,7 @@
first={\endgraf[},
last={]\endgraf},
left={ (},
- right={) },
- command=\ruledhbox]
+ right={) }]
\startbuffer[testbuffer]
1,2,3,4,5
diff --git a/Master/texmf-dist/tex/context/base/m-graph.mkiv b/Master/texmf-dist/tex/context/base/m-graph.mkiv
index c15262cac71..62c4ec4cb87 100644
--- a/Master/texmf-dist/tex/context/base/m-graph.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-graph.mkiv
@@ -17,45 +17,6 @@
\unprotect
-\startluacode
- local format, gsub, find, match = string.format, string.gsub, string.find, string.match
-
- local simplify = true
-
- local function strip(n,e)
- -- get rid of e(0)
- -- get rid of e(+*)
- e = gsub(e,"^+","")
- -- remove leading zeros
- e = gsub(e,"^([+-]*)0+(%d)","%1%2")
- if not simplify then
- -- take it as it is
- elseif n == "1" then
- return format("10^{%s}",e)
- end
- return format("%s\\times10^{%s}",n,e)
- end
-
- function metapost.format_n(fmt,str)
- fmt = gsub(fmt,"@","%%")
- local initial, hasformat, final = match(fmt,"^(.-)(%%.-[%a])(.-)$")
- if hasformat then
- str = format(fmt,str)
- str = gsub(str,"(.-)e(.-)$",strip)
- str = format("%s\\mathematics{%s}%s",initial,str,final)
- elseif not find(fmt,"%%") then
- str = format("%"..fmt,str)
- str = gsub(str,"(.-)e(.-)$",strip)
- str = format("\\mathematics{%s}",str)
- end
- context(str)
- end
-\stopluacode
-
-\unexpanded\def\MPgraphformat#1#2{\ctxlua{metapost.format_n("#1","#2")}}
-
-% We could also delegate parsing using lower level plugins.
-
\defineMPinstance
[graph]
[\s!format=metafun,
@@ -64,37 +25,54 @@
\c!method=\s!double]
\startMPdefinitions{graph}
- if unknown context_grap: input "mp-grap.mpiv" ; fi ;
-\stopMPdefinitions
-
-% For backwards compatibility (for the moment), also load the graph macros in
-% the standard MP instance (scaled integer):
-
-\startMPdefinitions
- if unknown context_grap: input "mp-grap.mpiv" ; fi ;
+ if unknown context_grap : input mp-grap.mpiv ; fi ;
\stopMPdefinitions
\protect
\continueifinputfile{m-graph.mkiv}
+%D We put this test here as in \type {meta-tex.mkiv} it would abort due to redefinition
+%D of namespaces.
+
\starttext
\startMPpage[instance=graph]
- label(format("@g","1e-8"), (0, 0)) ;
- label(format("@g","1e+8"), (2cm, 0)) ;
- label(format("@g","1e-10"), (0, -0.5cm)) ;
- label(format("@g","1e+10"), (2cm,-0.5cm)) ;
- label(format("@g","1e-12"), (0, -1.0cm)) ;
- label(format("@g","1e+12"), (2cm,-1.0cm)) ;
- label(format("@g","1e-0"), (0, -1.5cm)) ;
- label(format("@g","1e+0"), (2cm,-1.5cm)) ;
- label(format("@g","1"), (0, -2.0cm)) ;
- label(format("@g","1"), (2cm,-2.0cm)) ;
- label(format("@g","1e-102"),(0, -2.5cm)) ;
- label(format("@g","1e+102"),(2cm,-2.5cm)) ;
+ label.rt(format("@g","1e-8"), (0, 0)) ;
+ label.rt(format("@g","1e+8"), (2cm, 0)) ;
+ label.rt(format("@g","1e-10"), (0, -0.5cm)) ;
+ label.rt(format("@g","1e+10"), (2cm,-0.5cm)) ;
+ label.rt(format("@g","1e-12"), (0, -1.0cm)) ;
+ label.rt(format("@g","1e+12"), (2cm,-1.0cm)) ;
+ label.rt(format("@g","1e-0"), (0, -1.5cm)) ;
+ label.rt(format("@g","1e+0"), (2cm,-1.5cm)) ;
+ label.rt(format("@g","1"), (0, -2.0cm)) ;
+ label.rt(format("@g","1"), (2cm,-2.0cm)) ;
+ label.rt(format("@g","1e-102"),(0, -2.5cm)) ;
+ label.rt(format("@g","1e+102"),(2cm,-2.5cm)) ;
+ currentpicture := currentpicture shifted (-4cm,0) ;
+ %
+ label.rt(format("@j","1e-8"), (0, 0)) ;
+ label.rt(format("@j","1e+8"), (2cm, 0)) ;
+ label.rt(format("@j","1e-10"), (0, -0.5cm)) ;
+ label.rt(format("@j","1e+10"), (2cm,-0.5cm)) ;
+ label.rt(format("@j","1e-12"), (0, -1.0cm)) ;
+ label.rt(format("@j","1e+12"), (2cm,-1.0cm)) ;
+ label.rt(format("@j","1e-0"), (0, -1.5cm)) ;
+ label.rt(format("@j","1e+0"), (2cm,-1.5cm)) ;
+ label.rt(format("@j","1"), (0, -2.0cm)) ;
+ label.rt(format("@j","1"), (2cm,-2.0cm)) ;
+ label.rt(format("@j","1e-102"),(0, -2.5cm)) ;
+ label.rt(format("@j","1e+102"),(2cm,-2.5cm)) ;
+ label.rt(formatted("(@f,@f)",(1.23,4.56)),(0cm,-3.0cm)) ;
+ label.rt(formatted("(@i,@i)",(1.23,4.56)),(0cm,-3.5cm)) ;
+ label.rt(formatted("(@g,@g)",(1.23,4.56)),(0cm,-4.0cm)) ;
+ label.rt(formatted("(@e,@e)",(1.23,4.56)),(0cm,-4.5cm)) ;
+ label.rt(formatted("(@j,@j)",(1.23,4.56)),(0cm,-5.0cm)) ;
\stopMPpage
+\stoptext
+
% \startMPpage[instance=graph]
% draw begingraph(3in,2in);
% gdraw "t:/metapost/grphdata/agepop91.d";
@@ -153,5 +131,3 @@
% autogrid(otick.bot,otick.rt);
% endgraph;
% \stopMPpage
-
-\stoptext
diff --git a/Master/texmf-dist/tex/context/base/m-hemistich.mkiv b/Master/texmf-dist/tex/context/base/m-hemistich.mkiv
new file mode 100644
index 00000000000..55fde7b9283
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-hemistich.mkiv
@@ -0,0 +1,112 @@
+%D \module
+%D [ file=m-hemistich,
+%D version=2013.08.26,
+%D title=\CONTEXT\ Extra Modules,
+%D subtitle=Hemistiches,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is an experimental module for Idris. More is possible but not now.
+
+\unprotect
+
+\installcorenamespace{hemistich}
+
+\installcommandhandler \??hemistich {hemistich} \??hemistich
+
+\setuphemistich
+ [\c!width=\v!local,
+ \c!distance=4\emwidth,
+ \c!separator=\vl\hskip.25em\vl]
+
+\unexpanded\def\hemistiches
+ {\dosingleempty\dohemistiches}
+
+\unexpanded\def\dohemistiches
+ {\dodohemistiches\empty}
+
+\unexpanded\def\dodohemistiches#1[#2]#3#4%
+ {\dontleavehmode
+ \begingroup
+ \doifassignmentelse{#2}
+ {\edef\currenthemistich{#1}%
+ \setupcurrenthemistich[#2]}
+ {\def\currenthemistich{#2}}%
+ \doifelse{\hemistichparameter\c!width}\v!local
+ {\scratchwidth\availablehsize}
+ {\scratchwidth\hemistichparameter\c!width\relax}%
+ \spaceskip\zeropoint\s!plus\plusone\s!fill\relax
+ \hbox to \scratchwidth\bgroup
+ \scratchwidth.5\dimexpr\scratchwidth-\hemistichparameter\c!distance\relax
+ \hbox to \scratchwidth\bgroup
+ \usehemistichstyleandcolor\c!leftstyle\c!leftcolor#3%
+ \egroup
+ \hss
+ \begingroup
+ \usehemistichstyleandcolor\c!separatorstyle\c!separatorcolor
+ \hemistichparameter\c!separator
+ \endgroup
+ \hss
+ \hbox to \scratchwidth\bgroup
+ \usehemistichstyleandcolor\c!rightstyle\c!rightcolor#4%
+ \egroup
+ \egroup
+ \endgroup}
+
+\unexpanded\def\hemistichescaesura#1#2#3%
+ {\dodohemistiches\empty[\c!separator={#2}]{#1}{#3}}
+
+\appendtoks
+ \setvalue{\currenthemistich}{\dohemistiches{\currenthemistich}}%
+\to \everydefinehemistich
+
+\protect
+
+\continueifinputfile{m-hemistich.mkiv}
+
+\setuphemistich
+ [leftcolor=darkred,
+ separatorcolor=darkgreen,
+ rightcolor=darkblue]
+
+\setupwhitespace
+ [big]
+
+\starttext
+
+% \righttoleft
+
+\hemistichescaesura{left side of the brain}{equals}{right side of the brain}
+
+\hemistiches{left side of the brain}{right side of the brain}
+
+\startitemize
+ \startitem
+ \hemistiches{left side of the brain}{right side of the brain}
+ \startitemize
+ \startitem
+ \hemistiches{left side of the brain}{right side of the brain}
+ \startitemize
+ \startitem
+ \hemistiches{left side of the brain}{right side of the brain}
+ \stopitem
+ \stopitemize
+ \stopitem
+ \stopitemize
+ \startitem
+ \hemistiches{left side of the brain}{right side of the brain}
+ \stopitem
+ \stopitem
+\stopitemize
+
+\startitemize
+\item \hemistiches{left side of the brain}{right side of the brain}
+\stopitemize
+
+\stoptext
+
diff --git a/Master/texmf-dist/tex/context/base/m-nodechart.lua b/Master/texmf-dist/tex/context/base/m-nodechart.lua
new file mode 100644
index 00000000000..612b737672a
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-nodechart.lua
@@ -0,0 +1,175 @@
+if not modules then modules = { } end modules ['m-nodechart'] = {
+ version = 1.001,
+ comment = "companion to m-nodechart.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+local points = number.nopts
+local ptfactor = number.dimenfactors.pt
+
+local nodecodes = nodes.nodecodes
+local kerncodes = nodes.kerncodes
+local penaltycodes = nodes.penaltycodes
+local gluecodes = nodes.gluecodes
+local whatsitcodes = nodes.whatsitcodes
+
+moduledata.charts = moduledata.charts or { }
+moduledata.charts.nodes = moduledata.charts.nodes or { }
+
+local formatters = { }
+
+-- subtype font char lang left right uchyph components xoffset yoffset width height depth
+
+function formatters.glyph(n,comment)
+ return format("\\doFLOWglyphnode{%s}{%s}{%s}{%s}{U+%05X}",comment,n.subtype,n.font,n.char,n.char)
+end
+
+-- pre post replace
+
+function formatters.disc(n,comment)
+ return format("\\doFLOWdiscnode{%s}{%s}",comment,n.subtype)
+end
+
+-- subtype kern
+
+function formatters.kern(n,comment)
+ -- return format("\\doFLOWkernnode{%s}{%s}{%s}",comment,kerncodes[n.subtype],points(n.kern))
+ return format("\\doFLOWkernnode{%s}{%s}{%.4f}",comment,kerncodes[n.subtype],n.kern*ptfactor)
+end
+
+-- subtype penalty
+
+function formatters.penalty(n,comment)
+ return format("\\doFLOWpenaltynode{%s}{%s}{%s}",comment,"penalty",n.penalty)
+end
+
+-- subtype width leader spec (stretch shrink ...
+
+function formatters.glue(n,comment)
+ local s = n.spec
+ -- return format("\\doFLOWgluenode{%s}{%s}{%s}{%s}{%s}",comment,gluecodes[n.subtype],points(s.width),points(s.stretch),points(s.shrink))
+ return format("\\doFLOWgluenode{%s}{%s}{%.4f}{%.4f}{%.4f}",comment,gluecodes[n.subtype],s.width*ptfactor,s.stretch*ptfactor,s.shrink*ptfactor)
+end
+
+-- subtype width leader spec (stretch shrink ...
+
+function formatters.whatsit(n,comment)
+ local subtype = n.subtype
+ local whatsit = whatsitcodes[subtype]
+ if whatsit == "dir" or whatsit == "localpar" then
+ return format("\\doFLOWdirnode{%s}{%s}{%s}",comment,whatsit,n.dir)
+ else
+ return nodecodes[n.id]
+ end
+end
+
+-- I will make a dedicated set of shapes for this.
+
+local shapes = {
+ glyph = "procedure",
+ disc = "procedure",
+ kern = "action",
+ penalty = "action",
+ glue = "action",
+}
+
+local function flow_nodes_to_chart(specification)
+ local head = specification.head
+ local box = specification.box
+ local comment = specification.comment or ""
+ local x = specification.x or 1
+ local y = specification.y or 0
+ --
+ if box then
+ box = tex.getbox(tonumber(box))
+ head = box and box.list
+ end
+ --
+ local current = head
+ --
+ while current do
+ local nodecode = nodecodes[current.id]
+ local formatter = formatters[nodecode]
+ local shape = shapes[nodecode]
+ y = y + 1
+ local next = current.next
+ commands.flow_start_cell { shape = { framecolor = "nodechart:" .. nodecode } }
+ commands.flow_set_name(tostring(current))
+ commands.flow_set_location(x,y)
+ if shape then
+ commands.flow_set_shape(shape)
+ end
+ if formatter then
+ commands.flow_set_text("node",formatter(current,comment))
+ else
+ commands.flow_set_text("node",nodecode)
+ end
+ if next then
+ commands.flow_set_connection("bt","",tostring(next))
+ end
+ if nodecode == "glyph" then
+ local components = current.components
+ if components then
+ commands.flow_set_connection("rl","",tostring(components))
+ commands.flow_stop_cell()
+ n = flow_nodes_to_chart { head = components, comment = "component",x = x+2, y = y-1 }
+ else
+ commands.flow_stop_cell()
+ end
+ elseif nodecode == "disc" then
+ local pre = current.pre
+ local pos = current.post
+ local rep = current.replace
+ if pre and not pos and not rep then
+ if pre then
+ commands.flow_set_connection("rl","",tostring(pre))
+ end
+ commands.flow_stop_cell()
+ if pre then
+ n = flow_nodes_to_chart { head = pre, comment = "prebreak", x = x+1, y = y-1 }
+ end
+ else
+ if pre then
+ commands.flow_set_connection("+rl","",tostring(pre))
+ end
+ if rep then
+ commands.flow_set_connection("rl","",tostring(rep))
+ end
+ if pos then
+ commands.flow_set_connection("-rl","",tostring(pos))
+ end
+ commands.flow_stop_cell()
+ if pre then
+ n = flow_nodes_to_chart{ head = pre, comment = "prebreak", x = x+1, y = y-2 }
+ end
+ if rep then
+ n = flow_nodes_to_chart{ head = rep, comment = "replacement", x = x+1, y = y-1 }
+ end
+ if pos then
+ n = flow_nodes_to_chart{ head = pos, comment = "postbreak", x = x+1, y = y }
+ end
+ end
+ elseif nodecode == "hlist" then
+ local list = current.list
+ if list then
+ commands.flow_set_connection("rl","",tostring(list))
+ commands.flow_stop_cell()
+ n = flow_nodes_to_chart { head = list, comment = "list", x = x+2, y = y-1 }
+ else
+ commands.flow_stop_cell()
+ end
+ else
+ commands.flow_stop_cell()
+ end
+ current = next
+ end
+end
+
+function moduledata.charts.nodes.chart(specification)
+ commands.flow_start_chart(specification.name)
+ flow_nodes_to_chart(specification)
+ commands.flow_stop_chart()
+end
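+
+-- Editorial usage sketch, not part of the module: the specification table accepts
+-- either an explicit node list ("head") or a box number ("box"), plus an optional
+-- "comment" label; the chart name "demo" below is hypothetical.
+--
+-- moduledata.charts.nodes.chart {
+--     name = "demo", -- FLOWchart name, later typeset with \FLOWchart[demo][...]
+--     box  = 0,      -- chart the node list of box register 0
+-- }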
diff --git a/Master/texmf-dist/tex/context/base/m-nodechart.mkvi b/Master/texmf-dist/tex/context/base/m-nodechart.mkvi
index 359d598ce62..c9d9858506c 100644
--- a/Master/texmf-dist/tex/context/base/m-nodechart.mkvi
+++ b/Master/texmf-dist/tex/context/base/m-nodechart.mkvi
@@ -1,154 +1,19 @@
-\usemodule[chart]
+%D \module
+%D [ file=m-nodechart,
+%D version=2011.11.11, % not sure when it started, needed for fonts-mkiv
+%D title=\CONTEXT\ Modules,
+%D subtitle=Node Visualization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
-\startluacode
-
-local format = string.format
-local points = number.nopts
-local ptfactor = number.dimenfactors.pt
-
-local nodecodes = nodes.nodecodes
-local kerncodes = nodes.kerncodes
-local penaltycodes = nodes.penaltycodes
-local gluecodes = nodes.gluecodes
-local whatsitcodes = nodes.whatsitcodes
-
-local formatters = { }
-
-function formatters.glyph(n,comment)
- -- subtype font char lang left right uchyph components xoffset yoffset width height depth
- return format("\\doFLOWglyphnode{%s}{%s}{%s}{%s}{U+%05X}",comment,n.subtype,n.font,n.char,n.char)
-end
-
-function formatters.disc(n,comment)
- -- pre post replace
- return format("\\doFLOWdiscnode{%s}{%s}",comment,n.subtype)
-end
-
-function formatters.kern(n,comment)
- -- subtype kern
- -- return format("\\doFLOWkernnode{%s}{%s}{%s}",comment,kerncodes[n.subtype],points(n.kern))
- return format("\\doFLOWkernnode{%s}{%s}{%.4f}",comment,kerncodes[n.subtype],n.kern*ptfactor)
-end
-
-function formatters.penalty(n,comment)
- -- subtype penalty
- return format("\\doFLOWpenaltynode{%s}{%s}{%s}",comment,"penalty",n.penalty)
-end
-
-function formatters.glue(n,comment)
- -- subtype width leader spec (stretch shrink ...
- local s = n.spec
- -- return format("\\doFLOWgluenode{%s}{%s}{%s}{%s}{%s}",comment,gluecodes[n.subtype],points(s.width),points(s.stretch),points(s.shrink))
- return format("\\doFLOWgluenode{%s}{%s}{%.4f}{%.4f}{%.4f}",comment,gluecodes[n.subtype],s.width*ptfactor,s.stretch*ptfactor,s.shrink*ptfactor)
-end
-
-function formatters.whatsit(n,comment)
- -- subtype width leader spec (stretch shrink ...
- local subtype = n.subtype
- local whatsit = whatsitcodes[subtype]
- if whatsit == "dir" or whatsit == "localpar" then
- return format("\\doFLOWdirnode{%s}{%s}{%s}",comment,whatsit,n.dir)
- else
- return nodecodes[n.id]
- end
-end
-
-local shapes = { -- I will make a dedicated set of shapes for this.
- glyph = "procedure",
- disc = "procedure",
- kern = "action",
- penalty = "action",
- glue = "action",
-}
-
-local function flow_nodes_to_chart(head,comment,x,y,how)
- local current = head
- while current do
- local nodecode = nodecodes[current.id]
- local formatter = formatters[nodecode]
- local shape = shapes[nodecode]
- y = y + 1
- local next = current.next
- commands.flow_start_cell { shape = { framecolor = "nodechart:" .. nodecode } }
- commands.flow_set_name(tostring(current))
- commands.flow_set_location(x,y)
- if shape then
- commands.flow_set_shape(shape)
- end
- if formatter then
- commands.flow_set_text("node",formatter(current,comment))
- else
- commands.flow_set_text("node",nodecode)
- end
- if next then
- commands.flow_set_connection("bt","",tostring(next))
- end
- if nodecode == "glyph" then
- local components = current.components
- if components then
- commands.flow_set_connection("rl","",tostring(components))
- commands.flow_stop_cell()
- n = flow_nodes_to_chart(components,"component",x+2,y-1)
- else
- commands.flow_stop_cell()
- end
- elseif nodecode == "disc" then
- local pre = current.pre
- local pos = current.post
- local rep = current.replace
- if pre and not rep and not rep then
- if pre then
- commands.flow_set_connection("rl","",tostring(pre))
- end
- commands.flow_stop_cell()
- if pre then
- n = flow_nodes_to_chart(pre,"prebreak",x+1,y-1)
- end
- else
- if pre then
- commands.flow_set_connection("+rl","",tostring(pre))
- end
- if rep then
- commands.flow_set_connection("rl","",tostring(rep))
- end
- if pos then
- commands.flow_set_connection("-rl","",tostring(pos))
- end
- commands.flow_stop_cell()
- if pre then
- n = flow_nodes_to_chart(pre,"prebreak",x+1,y-2)
- end
- if rep then
- n = flow_nodes_to_chart(rep,"replacement",x+1,y-1)
- end
- if pos then
- n = flow_nodes_to_chart(pos,"postbreak",x+1,y)
- end
- end
- elseif nodecode == "hlist" then
- local list = current.list
- if list then
- commands.flow_set_connection("rl","",tostring(list))
- commands.flow_stop_cell()
- n = flow_nodes_to_chart(list,"list",x+2,y-1)
- else
- commands.flow_stop_cell()
- end
- else
- commands.flow_stop_cell()
- end
- current = next
- end
- return n
-end
-
-function commands.flow_nodes_to_chart(name,head,max)
- commands.flow_start_chart(name)
- flow_nodes_to_chart(head,"",1,0)
- commands.flow_stop_chart()
-end
-
-\stopluacode
+\registerctxluafile{m-nodechart}{1.001}
+
+\usemodule[chart]
\unprotect
@@ -200,17 +65,20 @@ end
% this is a temporary interface ... we will have instances and optional settings
-\unexpanded\def\boxtoFLOWchart#name#max#box%
- {\ctxcommand{flow_nodes_to_chart("#name",tex.box[\number#box].list,\number#max)}}
+\unexpanded\def\boxtoFLOWchart[#name]#box%
+ {\ctxlua{moduledata.charts.nodes.chart {
+ name = "#name",
+ box = \number#box,
+ }}}
-\unexpanded\def\nextboxtoFLOWchart#name#max%
- {\dowithnextbox{\boxtoFLOWchart{#name}{#max}\nextbox}}
+\unexpanded\def\nextboxtoFLOWchart[#name]%
+ {\dowithnextbox{\boxtoFLOWchart[#name]\nextbox}}
-\unexpanded\def\hboxtoFLOWchart#name#max%
- {\nextboxtoFLOWchart{#name}{#max}\hbox}
+\unexpanded\def\hboxtoFLOWchart[#name]%
+ {\nextboxtoFLOWchart[#name]\hbox}
-\unexpanded\def\vboxtoFLOWchart#name#max%
- {\nextboxtoFLOWchart{#name}{#max}\vbox}
+\unexpanded\def\vboxtoFLOWchart[#name]%
+ {\nextboxtoFLOWchart[#name]\vbox}
\protect
@@ -224,7 +92,7 @@ end
\startTEXpage[offset=10pt]
- \hboxtoFLOWchart{dummy}{3}{an affil\discretionary{-}{-}{!}iation}
+ \hboxtoFLOWchart[dummy]{an affil\discretionary{-}{-}{!}iation}
\FLOWchart[dummy][width=14em,height=3em,dx=1em,dy=.75em,hcompact=yes]
@@ -232,7 +100,7 @@ end
\startTEXpage[offset=10pt]
- \hboxtoFLOWchart{dummy}{3}{an affiliation}
+ \hboxtoFLOWchart[dummy]{an affiliation}
\FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes]
@@ -240,7 +108,7 @@ end
\startTEXpage[offset=10pt]
- \hboxtoFLOWchart{dummy}{3}{\nl effe fijn fietsen}
+ \hboxtoFLOWchart[dummy]{\nl effe fijn fietsen}
\FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes]
@@ -248,7 +116,7 @@ end
\startTEXpage[offset=10pt]
- \hboxtoFLOWchart{dummy}{3}{\righttoleft t\kern 1pt est}
+ \hboxtoFLOWchart[dummy]{\righttoleft t\kern 1pt est}
\FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes]
diff --git a/Master/texmf-dist/tex/context/base/m-oldbibtex.mkiv b/Master/texmf-dist/tex/context/base/m-oldbibtex.mkiv
new file mode 100644
index 00000000000..08c23e7cc87
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-oldbibtex.mkiv
@@ -0,0 +1,16 @@
+%D \module
+%D [ file=m-oldbibtex,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D title=Fallback on old method,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\loadmarkfile{bibl-bib}
+\loadmarkfile{bibl-tra}
+
+\endinput
diff --git a/Master/texmf-dist/tex/context/base/m-punk.mkiv b/Master/texmf-dist/tex/context/base/m-punk.mkiv
index 23b477cb677..6bf92e4c0f3 100644
--- a/Master/texmf-dist/tex/context/base/m-punk.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-punk.mkiv
@@ -110,7 +110,10 @@ function metapost.characters.process(mpxformat, name, instances, scalefactor)
data
},
false,
- flusher
+ flusher,
+ false,
+ false,
+ "all"
)
lists[i] = {
characters = characters,
diff --git a/Master/texmf-dist/tex/context/base/m-r.mkii b/Master/texmf-dist/tex/context/base/m-r.mkii
new file mode 100644
index 00000000000..c2cb7ba8816
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-r.mkii
@@ -0,0 +1,174 @@
+%D \module
+%D [ file=m-r,
+%D version=2006.06.06,
+%D title=\CONTEXT\ Modules,
+%D subtitle=R Support,
+%D author={Johan Sandblom \& Hans Hagen},
+%D date=\currentdate,
+%D copyright={Johan Sandblom \& Hans Hagen}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+%D The following R-processor is a variation on Johan Sandblom's
+%D prototype.
+%D
+%D We can combine both variants in one macro definition. Also, we
+%D can minimize the number of runs by checking for a change.
+
+%D JS: The call to R has \type {-q} in order to prevent banner,
+%D \type {--save} to make sure it saves the workspace after the run,
+%D \type {--restore} to make sure it reads any workspace from a
+%D previous session.
+
+%D An easier and better solution is to use the buffering mechanisms:
+
+\def\Rbufferprefix{r-}
+
+\newcounter\nofRfiles
+
+\def\Rfile{\TEXbufferfile{\Rbufferprefix\nofRfiles}}
+
+\def\startR
+ {\doglobal\increment\nofRfiles
+ \dostartbuffer[\Rbufferprefix\nofRfiles][startR][stopR]}
+
+\def\stopR
+ {\doifmode{*\v!first}\runR
+ \typefile{\Rfile.out}}
+
+\def\startRhidden
+ {\doglobal\increment\nofRfiles
+ \dostartbuffer[\Rbufferprefix\nofRfiles][startRhidden][stopRhidden]}
+
+\def\stopRhidden
+ {\doifmode{*\v!first}\runR}
+
+\def\runR
+ {\executesystemcommand{texmfstart
+ --ifchanged=\Rfile\space --direct R
+ CMD BATCH -q --save --restore \Rfile\space \Rfile.out}}
+
+\protect \doifnotmode{demo}{\endinput}
+
+% Johan's test file:
+
+\usemodule[r]
+
+\def\R{R}
+
+\setupcolors[state=start]
+
+\setuptyping
+ [Rtype]
+ [color=darkgreen]
+
+\starttext
+
+First a test of whether the workspace is persistent:
+bla
+
+\startR
+a <- "bla"
+b <- "blabla"
+ls()
+\stopR
+
+One \R run ends, another begins.
+
+\startR
+ls()
+\stopR
+
+Now follows a hidden \R run which cleans the R workspace
+
+\startRhidden
+rm(list=ls())
+save.image()
+\stopRhidden
+
+What is in the workspace now?
+
+\startR
+ls()
+\stopR
+
+Then a small test of generating a graphic, in this case a pdf
+\startR
+ushape <- c(rexp(500000), 12-rexp(500000))
+pdf("ushape.pdf")
+par(mfrow=c(1,2))
+hist(ushape)
+plot(density(ushape), main="Density")
+dev.off()
+\stopR
+
+The graphic \type{ushape.pdf} can be included in the standard \CONTEXT\ way
+\startbuffer
+\placefigure{An ugly distribution}{\externalfigure[ushape]}
+\stopbuffer
+\typebuffer
+\getbuffer
+
+\startR
+x <- rnorm(900)
+y <- rexp(900)
+# test comment
+f <- gl(9,9,900)
+summary(aov(y~x+Error(f)))
+library(lattice)
+pdf("lattice.pdf")
+xyplot(y~x|f)
+dev.off()
+\stopR
+
+With \type{Sweave}, lattice graphics calls must be enclosed in
+\type{print()} statements but that is not necessary here.
+
+\startbuffer
+\placefigure[here]{Lattice graphics}{\externalfigure[lattice]}
+\stopbuffer
+\typebuffer
+\getbuffer
+
+A test string with nasty characters. In \R, the result of a statement
+is not printed by default. Enclosing the statement in parentheses,
+however, causes the parser to see only the value of the statement and
+to apply the \type{print()} method.
+\startR
+(test <- ".*\\\\ [[{[{]{[{[{}\]\}=?!+%#|<|>@$")
+cat(test)
+\stopR
+
+A combination
+\startbuffer
+\placefigure{A combination of two previously used graphics}{
+\startcombination[2*1]
+ {\externalfigure[ushape][width=.4\textwidth]}{The first graphic, rescaled}
+ {\externalfigure[lattice][width=.4\textwidth]}{The second graphic, rescaled}}
+\stopcombination
+\stopbuffer
+\typebuffer
+\getbuffer
+
+Testing a function definition.
+
+\startR
+a.df <- data.frame(a=1:2, b=rnorm(2))
+a.df$a
+testfunction <- function(a=NULL, ...) {
+ for(i in 1:length(a)) {
+ gsub(a[[i]], "([a-r]|[A-R])", "bla")}
+ print(a)}
+\stopR
+
+What is in the workspace now?
+
+\startR
+ls()
+\stopR
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/m-r.tex b/Master/texmf-dist/tex/context/base/m-r.tex
deleted file mode 100644
index ac895905ce8..00000000000
--- a/Master/texmf-dist/tex/context/base/m-r.tex
+++ /dev/null
@@ -1,174 +0,0 @@
-%D \module
-%D [ file=m-r,
-%D version=2006.06.06,
-%D title=\CONTEXT\ Modules,
-%D subtitle=R Support,
-%D author={Johan Sandblom \& Hans Hagen},
-%D date=\currentdate,
-%D copyright={Johan Sandblom \& Hans Hagen}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\unprotect
-
-%D The following R-processor is a variation on Johan Sandblom's
-%D prototype.
-%D
-%D We can combine both variants in one macro definition. Also, we
-%D can minimize the number of runs by checking for a change.
-
-%D JS: The call to R has \type {-q} in order to prevent banner,
-%D \type {--save} to make sure it saves the workspace after the run,
-%D \type {--restore} to make sure it reads any workspace from a
-%D previous session.
-
-%D An easier and better solution is to use the buffering mechanisms:
-
-\def\Rbufferprefix{r-}
-
-\newcounter\nofRfiles
-
-\def\Rfile{\TEXbufferfile{\Rbufferprefix\nofRfiles}}%
-
-\def\startR
- {\doglobal\increment\nofRfiles
- \dostartbuffer[\Rbufferprefix\nofRfiles][startR][stopR]}
-
-\def\stopR
- {\doifmode{*\v!first}\runR
- \typefile{\Rfile.out}}
-
-\def\startRhidden
- {\doglobal\increment\nofRfiles
- \dostartbuffer[\Rbufferprefix\nofRfiles][startRhidden][stopRhidden]}
-
-\def\stopRhidden
- {\doifmode{*\v!first}\runR}
-
-\def\runR
- {\executesystemcommand{texmfstart
- --ifchanged=\Rfile\space --direct R
- CMD BATCH -q --save --restore \Rfile\space \Rfile.out}}
-
-\protect \doifnotmode{demo}{\endinput}
-
-% Johan's test file:
-
-\usemodule[r]
-
-\def\R{R}
-
-\setupcolors[state=start]
-
-\setuptyping
- [Rtype]
- [color=darkgreen]
-
-\starttext
-
-First a test of whether the workspace is persistent:
-bla
-
-\startR
-a <- "bla"
-b <- "blabla"
-ls()
-\stopR
-
-One \R run ends, another begins.
-
-\startR
-ls()
-\stopR
-
-Now follows a hidden \R run which cleans the R workspace
-
-\startRhidden
-rm(list=ls())
-save.image()
-\stopRhidden
-
-What is in the workspace now?
-
-\startR
-ls()
-\stopR
-
-Then a small test of generating a graphic, in this case a pdf
-\startR
-ushape <- c(rexp(500000), 12-rexp(500000))
-pdf("ushape.pdf")
-par(mfrow=c(1,2))
-hist(ushape)
-plot(density(ushape), main="Density")
-dev.off()
-\stopR
-
-The graphic \type{ushape.pdf} can be included in the standard \CONTEXT\ way
-\startbuffer
-\placefigure{An ugly distribution}{\externalfigure[ushape]}
-\stopbuffer
-\typebuffer
-\getbuffer
-
-\startR
-x <- rnorm(900)
-y <- rexp(900)
-# test comment
-f <- gl(9,9,900)
-summary(aov(y~x+Error(f)))
-library(lattice)
-pdf("lattice.pdf")
-xyplot(y~x|f)
-dev.off()
-\stopR
-
-With \type{Sweave} lattice graphics calls must be enclosed in
-\type{print()} statements but that is not necessary here.
-
-\startbuffer
-\placefigure[here]{Lattice graphics}{\externalfigure[lattice]}
-\stopbuffer
-\typebuffer
-\getbuffer
-
-A test string with nasty characters. In \R, the result of a statement
-is not printed by default. Enclosing the statement in parentheses,
-however causes the parser to see only the value of the statement and
-applying the \type{print()} method.
-\startR
-(test <- ".*\\\\ [[{[{]{[{[{}\]\}=?!+%#|<|>@$")
-cat(test)
-\stopR
-
-A combination
-\startbuffer
-\placefigure{A combination of two previously used graphics}{
-\startcombination[2*1]
- {\externalfigure[ushape][width=.4\textwidth]}{The first graphic, rescaled}
- {\externalfigure[lattice][width=.4\textwidth]}{The second graphic, rescaled}}
-\stopcombination
-\stopbuffer
-\typebuffer
-\getbuffer
-
-Testing a function definition.
-
-\startR
-a.df <- data.frame(a=1:2, b=rnorm(2))
-a.df$a
-testfunction <- function(a=NULL, ...) {
- for(i in 1:length(a)) {
- gsub(a[[i]], "([a-r]|[A-R])", "bla")}
- print(a)}
-\stopR
-
-What is in the workspace now?
-
-\startR
-ls()
-\stopR
-
-\stoptext
diff --git a/Master/texmf-dist/tex/context/base/m-scite.mkiv b/Master/texmf-dist/tex/context/base/m-scite.mkiv
new file mode 100644
index 00000000000..aed2c26315d
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/m-scite.mkiv
@@ -0,0 +1,269 @@
+%D \module
+%D [ file=m-scite,
+%D version=2014.04.28,
+%D title=\CONTEXT\ Extra Modules,
+%D subtitle=\SCITE\ lexers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% We can simplify the scite lexers, as long as we're able to return the
+% lexed result table and provide a lexer module with the functions that
+% the lexer expects (so I need to decipher the cxx file).
+%
+% lexer._TOKENSTYLES : table
+% lexer._CHILDREN : flag
+% lexer._EXTRASTYLES : table
+% lexer._GRAMMAR : flag
+%
+% lexers.load : function
+% lexers.lex : function
+%
+% And some properties that map styles onto scintilla styling. I get the
+% impression that we end up with something simpler, a hybrid between the
+% scite lexing and the current context way, so we get an intermediate
+% step, with some penalty for context, but at least I don't have to
+% maintain two sets (three sets as we also have a line based series).
+
+% TODO: as these files are in tds we can locate them and set the lexer root
+% to that one. Currently we're on context/documents.
+
+% This is an experiment: eventually we need to hook it into the verbatim code
+% and deal with widow lines and so.
+
+\startluacode
+
+-- todo: merge with collapse
+-- todo: prehash whitespaces
+
+-- todo: hook into the pretty print code
+-- todo: a simple catcode regime with only \ { }
+
+local gsub, sub, find = string.gsub, string.sub, string.find
+local concat = table.concat
+local formatters = string.formatters
+local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
+
+local scite = require("util-sci")
+buffers.scite = scite
+
+-- context output:
+
+local f_def_color = formatters["\\definecolor[slxc%s][h=%s%s%s]%%"]
+local f_fore_none = formatters["\\def\\slx%s#1{{\\slxc%s#1}}%%"]
+local f_fore_bold = formatters["\\def\\slx%s#1{{\\slxc%s\\bf#1}}%%"]
+local f_none_bold = formatters["\\def\\slx%s#1{{\\bf#1}}%%"]
+local f_none_none = formatters["\\def\\slx%s#1{{#1}}%%"]
+local f_texstyled = formatters["\\slx%s{%s}"]
+
+local f_mapping = [[
+\let\string\slxL\string\letterleftbrace
+\let\string\slxR\string\letterrightbrace
+\let\string\slxM\string\letterdollar
+\let\string\slxV\string\letterbar
+\let\string\slxH\string\letterhash
+\let\string\slxB\string\letterbackslash
+\let\string\slxP\string\letterpercent
+\let\string\slxS\string\fixedspace
+%]]
+
+local replacer = lpeg.replacer {
+ ["{"] = "\\slxL ",
+ ["}"] = "\\slxR ",
+ ["$"] = "\\slxM ",
+ ["|"] = "\\slxV ",
+ ["#"] = "\\slxH ",
+ ["\\"] = "\\slxB ",
+ ["%"] = "\\slxP ",
+ [" "] = "\\slxS ",
+}
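+
+-- Editorial sketch: with the replacer table above,
+--
+-- lpegmatch(replacer,"\\bf {x}") --> \slxB bf\slxS \slxL x\slxR
+--
+-- so catcode sensitive characters survive verbatim typesetting via the \slx*
+-- aliases set up in f_mapping.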
+
+local colors = nil
+
+local function exportcolors()
+ if not colors then
+ scite.loadscitelexer()
+ local function black(f)
+ return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
+ end
+ local result, r = { f_mapping }, 1
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local fore = v.fore
+ if fore and not black(fore) then
+ r = r + 1
+ result[r] = f_def_color(k,fore[1],fore[2],fore[3])
+ end
+ end
+ r = r + 1
+ result[r] = "%"
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local bold = v.bold
+ local fore = v.fore
+ r = r + 1
+ if fore and not black(fore) then
+ if bold then
+ result[r] = f_fore_bold(k,k)
+ else
+ result[r] = f_fore_none(k,k)
+ end
+ else
+ if bold then
+ result[r] = f_none_bold(k)
+ else
+ result[r] = f_none_none(k)
+ end
+ end
+ end
+ colors = concat(result,"\n")
+ end
+ return colors
+end
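+
+-- Editorial sketch, not part of the module: for a hypothetical style "comment" with
+-- fore = { "00", "7f", "00" } and bold = true, the formatters above expand to:
+--
+-- f_def_color("comment","00","7f","00") --> \definecolor[slxccomment][h=007f00]%
+-- f_fore_bold("comment","comment")      --> \def\slxcomment#1{{\slxccomment\bf#1}}%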
+
+local function exportwhites()
+ return setmetatableindex(function(t,k)
+ local v = find(k,"white") and true or false
+ t[k] = v
+ return v
+ end)
+end
+
+local function exportstyled(lexer,text)
+ local result = lexer.lex(lexer,text,0)
+ local start = 1
+ local whites = exportwhites()
+ local buffer = { }
+ for i=1,#result,2 do
+ local style = result[i]
+ local position = result[i+1]
+ local txt = sub(text,start,position-1)
+ txt = lpegmatch(replacer,txt)
+ if whites[style] then
+ buffer[#buffer+1] = txt
+ else
+ buffer[#buffer+1] = f_texstyled(style,txt)
+ end
+ start = position
+ end
+ buffer = concat(buffer)
+ return buffer
+end
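+
+-- Editorial note, inferred from the loop above: lexer.lex is assumed to return a flat
+-- array of alternating style names and one-past-the-end positions, so a hypothetical
+-- run over the 8 character string "\bf test" could yield
+--
+-- { "command", 4, "whitespace", 5, "text", 9 }
+--
+-- which exportstyled then slices into text[1..3], text[4..4] and text[5..8].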
+
+function scite.installcommands()
+ context(exportcolors())
+end
+
+local function lexdata(data,lexname)
+ buffers.assign("lex",exportstyled(scite.loadedlexers[lexname],data or ""))
+end
+
+scite.lexdata = lexdata
+
+function scite.lexbuffer(name,lexname)
+ lexdata(buffers.getcontent(name) or "",lexname or "tex")
+end
+
+function scite.lexfile(filename,lexname)
+ lexdata(io.loaddata(filename) or "",lexname or file.suffix(filename))
+end
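+
+-- Editorial usage sketch (the file and buffer names are hypothetical): both helpers
+-- fill the "lex" buffer, which the TeX wrappers below typeset with \getbuffer[lex].
+--
+-- buffers.scite.lexfile("util-sci.lua","lua") -- lexer name defaults to the file suffix
+-- buffers.scite.lexbuffer("demo","tex")       -- lex a previously defined buffer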
+
+-- html output
+
+\stopluacode
+
+% This is a preliminary interface.
+
+\unprotect
+
+\unexpanded\def\installscitecommands
+ {\ctxlua{buffers.scite.installcommands()}%
+ \let\installscitecommands\relax}
+
+\unexpanded\def\startscite{\startlines}
+\unexpanded\def\stopscite {\stoplines}
+
+\unexpanded\def\scitefile
+ {\dosingleargument\module_scite_file}
+
+\unexpanded\def\module_scite_file[#1]%
+ {\start
+ \ctxlua{buffers.scite.lexfile("#1")}%
+ \installscitecommands
+ \tt
+ \dontcomplain
+ \startscite
+ \getbuffer[lex]%
+ \stopscite
+ \stop}
+
+\unexpanded\def\scitebuffer
+ {\dodoubleargument\module_scite_buffer}
+
+\unexpanded\def\module_scite_buffer[#1][#2]%
+ {\start
+ \ifsecondargument
+ \ctxlua{buffers.scite.lexbuffer("#2","#1")}%
+ \else
+ \ctxlua{buffers.scite.lexbuffer("#1","tex")}%
+ \fi
+ \installscitecommands
+ \tt
+ \dontcomplain
+ \startscite
+ \getbuffer[lex]%
+ \stopscite
+ \stop}
+
+\protect
+
+\continueifinputfile{m-scite.mkiv}
+
+\setupbodyfont[dejavu,8pt]
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ header=1cm,
+ footer=1cm,
+ topspace=1cm,
+ bottomspace=1cm,
+ backspace=1cm]
+
+\startbuffer[demo]
+\startsubsubject[title={oeps}]
+
+\startMPcode
+ draw fullcircle
+ scaled 2cm
+ withpen pencircle scaled 1mm
+ withcolor .5green;
+ draw textext (
+ lua (
+ "local function f(s) return string.upper(s) end mp.quoted(f('foo'))"
+ )
+ ) withcolor .5red ;
+\stopMPcode
+
+\startluacode
+ context("foo")
+\stopluacode
+
+\stopsubsubject
+\stopbuffer
+
+\starttext
+
+% \scitefile[../lexers/scite-context-lexer.lua] \page
+% \scitefile[t:/manuals/about/about-metafun.tex] \page
+% \scitefile[t:/sources/strc-sec.mkiv] \page
+% \scitefile[e:/tmp/mp.w] \page
+% \scitefile[t:/manuals/hybrid/tugboat.bib] \page
+\scitefile[e:/tmp/test.bib] \page
+
+% \getbuffer[demo] \scitebuffer[demo]
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/m-spreadsheet.lua b/Master/texmf-dist/tex/context/base/m-spreadsheet.lua
index 9d5106e357e..1b3c5cb34f5 100644
--- a/Master/texmf-dist/tex/context/base/m-spreadsheet.lua
+++ b/Master/texmf-dist/tex/context/base/m-spreadsheet.lua
@@ -129,10 +129,10 @@ function datacell(a,b,...)
end
local function checktemplate(s)
- if find(s,"%%") then
+ if find(s,"%",1,true) then
-- normal template
return s
- elseif find(s,"@") then
+ elseif find(s,"@",1,true) then
-- tex specific template
return gsub(s,"@","%%")
else
@@ -172,7 +172,7 @@ function functions._s_(row,col,c,f,t)
for i=f,t do
local ci = c[i]
if type(ci) == "number" then
- r = r + c[i]
+ r = r + ci
end
end
return r
diff --git a/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv b/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv
index a0596899055..5e0499184bb 100644
--- a/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-spreadsheet.mkiv
@@ -120,12 +120,15 @@
\let\stopcell \module_spreadsheet_cell_stop
\doifassignmentelse{#1}
{\module_spreadsheet_start
+ \directsetup{spreadsheet:before:\currentspreadsheet}%
\bTABLE[\c!align=\v!flushright,#1]}
{\module_spreadsheet_start[#1]%
+ \directsetup{spreadsheet:before:\currentspreadsheet}%
\bTABLE[\c!align=\v!flushright,#2]}}
\unexpanded\def\stopspreadsheettable
{\eTABLE
+ \directsetup{spreadsheet:after:\currentspreadsheet}%
\stopspreadsheet
\egroup}
diff --git a/Master/texmf-dist/tex/context/base/m-translate.mkiv b/Master/texmf-dist/tex/context/base/m-translate.mkiv
index 363f115cb45..f36f9a9fba2 100644
--- a/Master/texmf-dist/tex/context/base/m-translate.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-translate.mkiv
@@ -89,7 +89,6 @@
\continueifinputfile{m-translate.mkiv}
-
\starttext
\translateinput[Moica][Mojca]
diff --git a/Master/texmf-dist/tex/context/base/m-zint.mkiv b/Master/texmf-dist/tex/context/base/m-zint.mkiv
index 95b265c5724..4957c846173 100644
--- a/Master/texmf-dist/tex/context/base/m-zint.mkiv
+++ b/Master/texmf-dist/tex/context/base/m-zint.mkiv
@@ -29,17 +29,21 @@ moduledata.zint = { }
local format, lower, gsub = string.format, string.lower, string.gsub
local patterns = lpeg.patterns
-local zint = "zint" -- '"c:/program files/zint/zint.exe"'
+local zint = "zint" -- '"c:/program files/zint/zint.exe"'
+local defaultcode = "PDF417"
-local whitespace = patterns.whitespace
-local spaces = whitespace^0
-local key = (spaces / "") * patterns.digit^0 * (patterns.colon * spaces / "")
-local value = (whitespace / "" + (1 - key))^1
-local pattern = lpeg.Cf(lpeg.Ct("") * (lpeg.Cg((lpeg.Cs(key) / tonumber) * (lpeg.Cs(value) / lower)) + patterns.anything)^0,rawset)
+local whitespace = patterns.whitespace
+local spaces = whitespace^0
+local key = (spaces / "") * patterns.digit^0 * (patterns.colon * spaces / "")
+local value = (whitespace / "" + (1 - key))^1
+local pattern = lpeg.Cf(lpeg.Ct("") * (lpeg.Cg((lpeg.Cs(key) / tonumber) * (lpeg.Cs(value) / lower)) + patterns.anything)^0,rawset)
local reverse
local function cleancode(code)
+ if not code or code == "" then
+ code = defaultcode
+ end
return lower(gsub(code," ",""))
end
@@ -76,6 +80,19 @@ end
\stopluacode
+\unprotect
+
+\unexpanded\def\barcode[#1]% [alternative=,text=]
+ {\bgroup
+ \getdummyparameters
+ [\c!alternative=,\c!text=,#1]%
+ \externalfigure
+ [\cldcontext{moduledata.zint.generate("\dummyparameter\c!alternative",\!!bs\dummyparameter\c!text\!!es)}]%
+ [#1,\c!alternative=,\c!text=]%
+ \egroup}
+
+\protect
+
\continueifinputfile{m-zint.mkiv}
\starttext
@@ -85,6 +102,10 @@ end
\externalfigure[\cldcontext{moduledata.zint.generate("PDF417","Ton Otten")}]
\blank
\externalfigure[\cldcontext{moduledata.zint.generate("ISBN","9789490688011")}]
+ \blank
+ \barcode[text=Does It Work?,width=\textwidth]
+ \blank
+ \barcode[alternative=isbn,text=9789490688011,width=3cm]
\stoptext
diff --git a/Master/texmf-dist/tex/context/base/math-acc.mkvi b/Master/texmf-dist/tex/context/base/math-acc.mkvi
new file mode 100644
index 00000000000..415f2b91f2d
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-acc.mkvi
@@ -0,0 +1,181 @@
+%D \module
+%D [ file=math-acc,
+%D version=2013.07.31,
+%D title=\CONTEXT\ Math Macros,
+%D subtitle=Accents,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Math Macros / Accents}
+
+% There are probably errors ... too distracted by amazing (piano) music videos running
+% on top of scite ... so: experimental code.
+
+\unprotect
+
+% This module permits overloading of accents so that we can do fancy things. The
+% implementation is similar to stackers. By default accents are defined in a simple
+% way. Contrary to extensibles, accents cannot grow indefinitely. Alas, the
+% implementation of accents is different too, in the sense that they are
+% prepositioned i.e. are already raised. (In my opinion for no real reason as they
+% need to adapt anyway).
+%
+% $ \ruledhbox{$H$} \hat{H} \ruledhbox{$\widehat{H}$} \widehat{H} $
+%
+% One alternative is:
+%
+% \definemathoverextensible [top] [hoed] ["FE302]
+% \definemathoverextensible [top] [slang] ["FE303]
+%
+% $ \hoed{H} \ruledhbox{$\hoed{H}$} \ruledhbox{$\hoed{\tf H}$} \slang{H} $
+%
+% But that nullifies the italic correction (and I'm in no mood to mess with that again).
+%
+% \definemathaccents [myaccents] [color=darkred]
+% \definemathtopaccent [myaccents] [mywidehat] ["0302]
+%
+% $ \hat{H} \widehat{H} \mywidehat{H} $
+
+% A first variant (kept for educational purposes):
+%
+% \installcorenamespace{mathaccents}
+%
+% \installcommandhandler \??mathaccents {mathaccent} \??mathaccents
+%
+% \let\setupmathaccents\setupmathaccent
+%
+% \setupmathaccents
+% [\c!top=,
+% \c!bottom=,
+% \c!mathstyle=,
+% \c!color=,
+% \c!command=\v!yes]
+%
+% \appendtoks
+% \edef\p_top_bottom{\namedmathaccentparameter\currentmathaccent\c!top\namedmathaccentparameter\currentmathaccent\c!bottom}%
+% \ifx\p_top_bottom\empty\else
+% \edef\p_command{\mathaccentparameter\c!command}%
+% \ifx\p_command\v!yes
+% \setuevalue\currentmathaccent{\math_accent{\currentmathaccent}}%
+% \fi
+% \fi
+% \to \everydefinemathaccent
+%
+% \def\math_accented_color_do_push{\pushcolor[\p_math_accent_color]}
+% \let\math_accented_color_do_pop \popcolor
+%
+% \unexpanded\def\math_accent#1#2%
+% {\begingroup
+% \edef\currentmathaccent{#1}%
+% \edef\p_math_accent_top {\mathaccentparameter\c!top}%
+% \edef\p_math_accent_bottom{\mathaccentparameter\c!bottom}%
+% \edef\p_math_accent_color {\mathaccentparameter\c!color}%
+% \startusemathstyleparameter\mathaccentparameter
+% \ifx\p_math_accented_color\empty
+% \let\math_accented_color_do_pop\donothing
+% \else
+% \let\math_accented_color_do_pop\popcolor
+% \math_accented_color_do_push
+% \fi
+% \ifx\p_math_accent_top\empty
+% \ifx\p_math_accent_bottom\empty
+% \else
+% \Umathaccent bottom \fam\zerocount\p_math_accent_bottom
+% \fi
+% \else
+% \ifx\p_math_accent_bottom\empty
+% \Umathaccent \fam\zerocount\p_math_accent_top
+% \else
+% \Umathaccent both \fam\zerocount\p_math_accent_top
+% \fam\zerocount\p_math_accent_bottom
+% \fi
+% \fi
+% {\popcolor#2}%
+% \stopusemathstyleparameter
+% \endgroup}
+%
+% \definemathaccent [myaccents] [\c!color=red]
+% \definemathaccent [mywidehat] [myaccents] [\c!top="0302]
+
+\installcorenamespace{mathaccents}
+
+\installcommandhandler \??mathaccents {mathaccents} \??mathaccents
+
+\setupmathaccents
+ [\c!top=,
+ \c!bottom=,
+ \c!mathstyle=,
+ \c!color=,
+ \c!command=\v!yes]
+
+\definemathaccents
+ [\v!both]
+
+\definemathaccents
+ [\v!top]
+ [\v!both]
+
+\definemathaccents
+ [\v!bottom]
+ [\v!both]
+
+\unexpanded\def\definemathtopaccent {\dotripleempty \math_accents_define_top }
+\unexpanded\def\definemathbottomaccent{\dotripleempty \math_accents_define_bottom}
+\unexpanded\def\definemathdoubleaccent{\doquadrupleempty\math_accents_define_double}
+
+\def\math_accents_define_top[#1][#2][#3]% class name top
+ {\ifthirdargument
+ \setuevalue{#2}{\math_accents_make_double {#1}\plusone{\number#3}\zerocount}%
+ \else
+ \setuevalue{#1}{\math_accents_make_double\noexpand\currentmathaccents\plusone{\number#2}\zerocount}%
+ \fi}
+
+\def\math_accents_define_bottom[#1][#2][#3]% class name bottom
+ {\ifthirdargument
+ \setuevalue{#2}{\math_accents_make_double {#1}\plustwo\zerocount{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_accents_make_double\noexpand\currentmathaccents\plustwo\zerocount{\number#2}}%
+ \fi}
+
+\def\math_accents_define_double[#1][#2][#3][#4]% class name top bottom
+ {\iffourthargument
+ \setuevalue{#2}{\math_accents_make_double {#1}\plusthree{\number#3}{\number#4}}%
+ \else
+ \setuevalue{#1}{\math_accents_make_double\noexpand\currentmathaccents\plusthree{\number#2}{\number#3}}%
+ \fi}
+
+\def\math_accents_color_push_yes
+ {\pushcolor[\p_math_accent_color]%
+ \let\math_accents_color_pop\popcolor}
+
+\def\math_accents_color_push_nop
+ {\let\math_accents_color_pop\donothing}
+
+\unexpanded\def\math_accents_make_double#class#kind#top#bottom#content%
+ {\begingroup
+ \edef\currentmathaccents {#class}%
+ \edef\p_math_accent_color{\mathaccentsparameter\c!color}%
+ \startusemathstyleparameter\mathaccentsparameter
+ \ifx\p_math_accent_color\empty
+ \math_accents_color_push_nop
+ \else
+ \math_accents_color_push_yes
+ \fi
+ \ifcase#kind\or
+ \Umathaccent \fam\zerocount#top
+ \or
+ \Umathaccent bottom \fam\zerocount#bottom
+ \or
+ \Umathaccent both \fam\zerocount#top
+ \fam\zerocount#bottom
+ \fi
+ {\math_accents_color_pop#content}%
+ \stopusemathstyleparameter
+ \endgroup}
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-act.lua b/Master/texmf-dist/tex/context/base/math-act.lua
index 4f9b3b7e822..879480dce59 100644
--- a/Master/texmf-dist/tex/context/base/math-act.lua
+++ b/Master/texmf-dist/tex/context/base/math-act.lua
@@ -10,19 +10,24 @@ if not modules then modules = { } end modules ['math-act'] = {
local type, next = type, next
local fastcopy = table.fastcopy
+local formatters = string.formatters
-local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
-local report_math = logs.reporter("mathematics","initializing")
+local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
+local trace_collecting = false trackers.register("math.collecting", function(v) trace_collecting = v end)
-local context = context
-local commands = commands
-local mathematics = mathematics
-local texdimen = tex.dimen
-local abs = math.abs
+local report_math = logs.reporter("mathematics","initializing")
-local sequencers = utilities.sequencers
-local appendgroup = sequencers.appendgroup
-local appendaction = sequencers.appendaction
+local context = context
+local commands = commands
+local mathematics = mathematics
+local texsetdimen = tex.setdimen
+local abs = math.abs
+
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
+
+local fontchars = fonts.hashes.characters
local mathfontparameteractions = sequencers.new {
name = "mathparameters",
@@ -286,14 +291,104 @@ end
sequencers.appendaction("aftercopyingcharacters", "system","mathematics.overloaddimensions")
--- a couple of predefined tewaks:
+-- a couple of predefined tweaks:
local tweaks = { }
mathematics.tweaks = tweaks
-function tweaks.fixbadprime(target,original)
- target.characters[0xFE325] = target.characters[0x2032]
-end
+-- function tweaks.fixbadprime(target,original)
+-- target.characters[0xFE325] = target.characters[0x2032]
+-- end
+
+-- these could go to math-fbk
+
+-- local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap)
+-- local characters = target.characters
+-- -- if not characters[newchr] then -- xits needs an enforce
+-- local addprivate = fonts.helpers.addprivate
+-- local olddata = characters[oldchr]
+-- if olddata then
+-- if swap then
+-- swap = characters[swap]
+-- height = swap.depth
+-- depth = 0
+-- else
+-- height = height or 0
+-- depth = depth or 0
+-- end
+-- local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height }
+-- local newdata = {
+-- commands = { correction, { "slot", 1, oldchr } },
+-- width = olddata.width,
+-- height = height,
+-- depth = depth,
+-- }
+-- characters[newchr] = newdata
+-- local nextglyph = olddata.next
+-- while nextglyph do
+-- local oldnextdata = characters[nextglyph]
+-- local newnextdata = {
+-- commands = { correction, { "slot", 1, nextglyph } },
+-- width = oldnextdata.width,
+-- height = height,
+-- depth = depth,
+-- }
+-- local newnextglyph = addprivate(target,formatters["original-%H"](nextglyph),newnextdata)
+-- newdata.next = newnextglyph
+-- local nextnextglyph = oldnextdata.next
+-- if nextnextglyph == nextglyph then
+-- break
+-- else
+-- olddata = oldnextdata
+-- newdata = newnextdata
+-- nextglyph = nextnextglyph
+-- end
+-- end
+-- local hv = olddata.horiz_variants
+-- if hv then
+-- hv = fastcopy(hv)
+-- newdata.horiz_variants = hv
+-- for i=1,#hv do
+-- local hvi = hv[i]
+-- local oldglyph = hvi.glyph
+-- local olddata = characters[oldglyph]
+-- local newdata = {
+-- commands = { correction, { "slot", 1, oldglyph } },
+-- width = olddata.width,
+-- height = height,
+-- depth = depth,
+-- }
+-- hvi.glyph = addprivate(target,formatters["original-%H"](oldglyph),newdata)
+-- end
+-- end
+-- end
+-- -- end
+-- end
+
+-- function tweaks.fixoverline(target,original)
+-- local height, depth = 0, 0
+-- local mathparameters = target.mathparameters
+-- if mathparameters then
+-- height = mathparameters.OverbarVerticalGap
+-- depth = mathparameters.UnderbarVerticalGap
+-- else
+-- height = target.parameters.xheight/4
+-- depth = height
+-- end
+-- accent_to_extensible(target,0x203E,original,0x0305,height,depth)
+-- -- also crappy spacing for our purpose: push to top of baseline
+-- accent_to_extensible(target,0xFE3DE,original,0x23DE,height,depth,0x23DF)
+-- accent_to_extensible(target,0xFE3DC,original,0x23DC,height,depth,0x23DD)
+-- accent_to_extensible(target,0xFE3B4,original,0x23B4,height,depth,0x23B5)
+-- -- for symmetry
+-- target.characters[0xFE3DF] = original.characters[0x23DF]
+-- target.characters[0xFE3DD] = original.characters[0x23DD]
+-- target.characters[0xFE3B5] = original.characters[0x23B5]
+-- -- inspect(fonts.helpers.expandglyph(target.characters,0x203E))
+-- -- inspect(fonts.helpers.expandglyph(target.characters,0x23DE))
+-- end
+
+-- sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweaks.fixoverline") -- for the moment always
-- helpers
@@ -301,6 +396,7 @@ local setmetatableindex = table.setmetatableindex
local family_font = node.family_font
local fontcharacters = fonts.hashes.characters
+local fontdescriptions = fonts.hashes.descriptions
local extensibles = utilities.storage.allocate()
fonts.hashes.extensibles = extensibles
@@ -324,24 +420,34 @@ local function extensiblecode(font,unicode)
if not character then
return unknown
end
+ local first = character.next
local code = unicode
- local next = character.next
+ local next = first
while next do
code = next
character = characters[next]
next = character.next
end
local char = chardata[unicode]
- local mathextensible = char and char.mathextensible
+ if not char then
+ return unknown
+ end
if character.horiz_variants then
if character.vert_variants then
return { e_mixed, code, character }
else
- local e = mathextensible and extensibles[mathextensible]
+ local m = char.mathextensible
+ local e = m and extensibles[m]
return e and { e, code, character } or unknown
end
elseif character.vert_variants then
- local e = mathextensible and extensibles[mathextensible]
+ local m = char.mathextensible
+ local e = m and extensibles[m]
+ return e and { e, code, character } or unknown
+ elseif first then
+ -- assume accent (they seldom stretch .. sizes)
+ local m = char.mathextensible or char.mathstretch
+ local e = m and extensibles[m]
return e and { e, code, character } or unknown
else
return unknown
@@ -374,31 +480,238 @@ end
-- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width
function commands.horizontalcode(family,unicode)
- local font = family_font(family or 0)
- local data = extensibles[font][unicode]
- local kind = data[1]
+ local font = family_font(family or 0)
+ local data = extensibles[font][unicode]
+ local kind = data[1]
+ local loffset = 0
+ local roffset = 0
if kind == e_left then
local charlist = data[3].horiz_variants
- local characters = fontcharacters[font]
- local left = charlist[1]
- texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
- texdimen.scratchrightoffset = 0
+ if charlist then
+ local left = charlist[1]
+ loffset = abs((left["start"] or 0) - (left["end"] or 0))
+ end
elseif kind == e_right then
local charlist = data[3].horiz_variants
- local characters = fontcharacters[font]
local right = charlist[#charlist]
- texdimen.scratchleftoffset = 0
- texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
+ roffset = abs((right["start"] or 0) - (right["end"] or 0))
elseif kind == e_horizontal then
local charlist = data[3].horiz_variants
- local characters = fontcharacters[font]
- local left = charlist[1]
- local right = charlist[#charlist]
- texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
- texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
+ if charlist then
+ local left = charlist[1]
+ local right = charlist[#charlist]
+ loffset = abs((left ["start"] or 0) - (left ["end"] or 0))
+ roffset = abs((right["start"] or 0) - (right["end"] or 0))
+ end
else
- texdimen.scratchleftoffset = 0
- texdimen.scratchrightoffset = 0
end
+ texsetdimen("scratchleftoffset",loffset)
+ texsetdimen("scratchrightoffset",roffset)
context(kind)
end
+
+-- experiment
+
+-- check: when true, only set when present in font
+-- force: when false, then not set when already set
+
+local blocks = characters.blocks -- this will move to char-ini
+
+blocks["uppercasenormal"] = { first = 0x00041, last = 0x0005A }
+blocks["uppercasebold"] = { first = 0x1D400, last = 0x1D419 }
+blocks["uppercaseitalic"] = { first = 0x1D434, last = 0x1D44D }
+blocks["uppercasebolditalic"] = { first = 0x1D468, last = 0x1D481 }
+blocks["uppercasescript"] = { first = 0x1D49C, last = 0x1D4B5 }
+blocks["uppercaseboldscript"] = { first = 0x1D4D0, last = 0x1D4E9 }
+blocks["uppercasefraktur"] = { first = 0x1D504, last = 0x1D51D }
+blocks["uppercasedoublestruck"] = { first = 0x1D538, last = 0x1D551 }
+blocks["uppercaseboldfraktur"] = { first = 0x1D56C, last = 0x1D585 }
+blocks["uppercasesansserifnormal"] = { first = 0x1D5A0, last = 0x1D5B9 }
+blocks["uppercasesansserifbold"] = { first = 0x1D5D4, last = 0x1D5ED }
+blocks["uppercasesansserifitalic"] = { first = 0x1D608, last = 0x1D621 }
+blocks["uppercasesansserifbolditalic"] = { first = 0x1D63C, last = 0x1D655 }
+blocks["uppercasemonospace"] = { first = 0x1D670, last = 0x1D689 }
+blocks["uppercasegreeknormal"] = { first = 0x00391, last = 0x003AA }
+blocks["uppercasegreekbold"] = { first = 0x1D6A8, last = 0x1D6C1 }
+blocks["uppercasegreekitalic"] = { first = 0x1D6E2, last = 0x1D6FB }
+blocks["uppercasegreekbolditalic"] = { first = 0x1D71C, last = 0x1D735 }
+blocks["uppercasegreeksansserifbold"] = { first = 0x1D756, last = 0x1D76F }
+blocks["uppercasegreeksansserifbolditalic"] = { first = 0x1D790, last = 0x1D7A9 }
+
+blocks["lowercasenormal"] = { first = 0x00061, last = 0x0007A }
+blocks["lowercasebold"] = { first = 0x1D41A, last = 0x1D433 }
+blocks["lowercaseitalic"] = { first = 0x1D44E, last = 0x1D467 }
+blocks["lowercasebolditalic"] = { first = 0x1D482, last = 0x1D49B }
+blocks["lowercasescript"] = { first = 0x1D4B6, last = 0x1D4CF }
+blocks["lowercaseboldscript"] = { first = 0x1D4EA, last = 0x1D503 }
+blocks["lowercasefraktur"] = { first = 0x1D51E, last = 0x1D537 }
+blocks["lowercasedoublestruck"] = { first = 0x1D552, last = 0x1D56B }
+blocks["lowercaseboldfraktur"] = { first = 0x1D586, last = 0x1D59F }
+blocks["lowercasesansserifnormal"] = { first = 0x1D5BA, last = 0x1D5D3 }
+blocks["lowercasesansserifbold"] = { first = 0x1D5EE, last = 0x1D607 }
+blocks["lowercasesansserifitalic"] = { first = 0x1D622, last = 0x1D63B }
+blocks["lowercasesansserifbolditalic"] = { first = 0x1D656, last = 0x1D66F }
+blocks["lowercasemonospace"] = { first = 0x1D68A, last = 0x1D6A3 }
+blocks["lowercasegreeknormal"] = { first = 0x003B1, last = 0x003CA }
+blocks["lowercasegreekbold"] = { first = 0x1D6C2, last = 0x1D6DB }
+blocks["lowercasegreekitalic"] = { first = 0x1D6FC, last = 0x1D715 }
+blocks["lowercasegreekbolditalic"] = { first = 0x1D736, last = 0x1D74F }
+blocks["lowercasegreeksansserifbold"] = { first = 0x1D770, last = 0x1D789 }
+blocks["lowercasegreeksansserifbolditalic"] = { first = 0x1D7AA, last = 0x1D7C3 }
+
+blocks["digitsnormal"] = { first = 0x00030, last = 0x00039 }
+blocks["digitsbold"] = { first = 0x1D7CE, last = 0x1D7D8 }
+blocks["digitsdoublestruck"] = { first = 0x1D7D8, last = 0x1D7E2 }
+blocks["digitssansserifnormal"] = { first = 0x1D7E2, last = 0x1D7EC }
+blocks["digitssansserifbold"] = { first = 0x1D7EC, last = 0x1D805 }
+blocks["digitsmonospace"] = { first = 0x1D7F6, last = 0x1D80F }
+
+blocks["mathematicaloperators"] = { first = 0x02200, last = 0x022FF }
+blocks["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF }
+blocks["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF }
+blocks["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF }
+blocks["letterlikesymbols"] = { first = 0x02100, last = 0x0214F }
+blocks["miscellaneoustechnical"] = { first = 0x02308, last = 0x0230B }
+blocks["geometricshapes"] = { first = 0x025A0, last = 0x025FF }
+blocks["miscellaneoussymbolsandarrows"] = { first = 0x02B30, last = 0x02B4C }
+blocks["mathematicalalphanumericsymbols"] = { first = 0x00400, last = 0x1D7FF }
+
+blocks["digitslatin"] = { first = 0x00030, last = 0x00039 }
+blocks["digitsarabicindic"] = { first = 0x00660, last = 0x00669 }
+blocks["digitsextendedarabicindic"] = { first = 0x006F0, last = 0x006F9 }
+------["digitsdevanagari"] = { first = 0x00966, last = 0x0096F }
+------["digitsbengali"] = { first = 0x009E6, last = 0x009EF }
+------["digitsgurmukhi"] = { first = 0x00A66, last = 0x00A6F }
+------["digitsgujarati"] = { first = 0x00AE6, last = 0x00AEF }
+------["digitsoriya"] = { first = 0x00B66, last = 0x00B6F }
+------["digitstamil"] = { first = 0x00030, last = 0x00039 } -- no zero
+------["digitstelugu"] = { first = 0x00C66, last = 0x00C6F }
+------["digitskannada"] = { first = 0x00CE6, last = 0x00CEF }
+------["digitsmalayalam"] = { first = 0x00D66, last = 0x00D6F }
+------["digitsthai"] = { first = 0x00E50, last = 0x00E59 }
+------["digitslao"] = { first = 0x00ED0, last = 0x00ED9 }
+------["digitstibetan"] = { first = 0x00F20, last = 0x00F29 }
+------["digitsmyanmar"] = { first = 0x01040, last = 0x01049 }
+------["digitsethiopic"] = { first = 0x01369, last = 0x01371 }
+------["digitskhmer"] = { first = 0x017E0, last = 0x017E9 }
+------["digitsmongolian"] = { first = 0x01810, last = 0x01819 }
+
+-- operators : 0x02200
+-- symbolsa : 0x02701
+-- symbolsb : 0x02901
+-- supplemental : 0x02A00
+
+-- from mathematics.gaps:
+
+blocks["lowercaseitalic"].gaps = {
+ [0x1D455] = 0x0210E, -- ℎ h
+}
+
+blocks["uppercasescript"].gaps = {
+ [0x1D49D] = 0x0212C, -- ℬ script B
+ [0x1D4A0] = 0x02130, -- ℰ script E
+ [0x1D4A1] = 0x02131, -- ℱ script F
+ [0x1D4A3] = 0x0210B, -- ℋ script H
+ [0x1D4A4] = 0x02110, -- ℐ script I
+ [0x1D4A7] = 0x02112, -- ℒ script L
+ [0x1D4A8] = 0x02133, -- ℳ script M
+ [0x1D4AD] = 0x0211B, -- ℛ script R
+}
+
+blocks["lowercasescript"].gaps = {
+ [0x1D4BA] = 0x0212F, -- ℯ script e
+ [0x1D4BC] = 0x0210A, -- ℊ script g
+ [0x1D4C4] = 0x02134, -- ℴ script o
+}
+
+blocks["uppercasefraktur"].gaps = {
+ [0x1D506] = 0x0212D, -- ℭ fraktur C
+ [0x1D50B] = 0x0210C, -- ℌ fraktur H
+ [0x1D50C] = 0x02111, -- ℑ fraktur I
+ [0x1D515] = 0x0211C, -- ℜ fraktur R
+ [0x1D51D] = 0x02128, -- ℨ fraktur Z
+}
+
+blocks["uppercasedoublestruck"].gaps = {
+ [0x1D53A] = 0x02102, -- ℂ bb C
+ [0x1D53F] = 0x0210D, -- ℍ bb H
+ [0x1D545] = 0x02115, -- ℕ bb N
+ [0x1D547] = 0x02119, -- ℙ bb P
+ [0x1D548] = 0x0211A, -- ℚ bb Q
+ [0x1D549] = 0x0211D, -- ℝ bb R
+ [0x1D551] = 0x02124, -- ℤ bb Z
+}
+
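+-- The gaps tables record the holes in the mathematical alphanumeric block: characters such
+-- as the italic h were already encoded among the Letterlike Symbols, so the alphanumeric
+-- slot (here 0x1D455) is unassigned and the real character lives at 0x0210E. Each entry
+-- therefore maps such an unassigned slot to the existing code point that takes its place.
+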
+-- todo: tounicode
+
+function mathematics.injectfallbacks(target,original)
+ local properties = original.properties
+ if properties and properties.hasmath then
+ local specification = target.specification
+ if specification then
+ local fallbacks = specification.fallbacks
+ if fallbacks then
+ local definitions = fonts.collections.definitions[fallbacks]
+ if definitions then
+ if trace_collecting then
+ report_math("adding fallback characters to font %a",specification.hash)
+ end
+ local definedfont = fonts.definers.internal
+ local copiedglyph = fonts.handlers.vf.math.copy_glyph
+ local fonts = target.fonts
+ local size = specification.size -- target.size
+ local characters = target.characters
+ if not fonts then
+ fonts = { }
+ target.fonts = fonts
+ target.type = "virtual"
+ target.properties.virtualized = true
+ end
+ if #fonts == 0 then
+                    fonts[1] = { id = 0, size = size } -- self, will be resolved later
+ end
+ local done = { }
+ for i=1,#definitions do
+ local definition = definitions[i]
+ local name = definition.font
+ local start = definition.start
+ local stop = definition.stop
+ local gaps = definition.gaps
+ local check = definition.check
+ local force = definition.force
+ local rscale = definition.rscale or 1
+ local offset = definition.offset or start
+ local id = definedfont { name = name, size = size * rscale }
+ local index = #fonts + 1
+ fonts[index] = { id = id, size = size }
+ local chars = fontchars[id]
+ local function remap(unic,unicode,gap)
+ local unic = unicode + offset - start
+ if check and not chars[unicode] then
+ -- not in font
+ elseif force or (not done[unic] and not characters[unic]) then
+ if trace_collecting then
+ report_math("remapping math character, vector %a, font %a, character %C%s%s",
+ fallbacks,name,unic,check and ", checked",gap and ", gap plugged")
+ end
+ characters[unic] = copiedglyph(target,characters,chars,unicode,index)
+ done[unic] = true
+ end
+ end
+ for unicode = start, stop do
+ local unic = unicode + offset - start
+ remap(unic,unicode,false)
+ end
+ if gaps then
+ for unic, unicode in next, gaps do
+ remap(unic,unicode,true)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
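+-- A small worked example of the slot arithmetic in the remap closure above, with
+-- hypothetical values taken from the digit blocks: a definition with start = 0x00030
+-- (digitsnormal) and offset = 0x1D7CE (digitsbold) moves the fallback glyph for "1" into
+-- the bold digit slot,
+--
+--   unic = unicode + offset - start
+--        = 0x00031 + 0x1D7CE - 0x00030
+--        = 0x1D7CF
+--
+-- while with the default offset (offset == start) every character keeps its own slot.
+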
+sequencers.appendaction("aftercopyingcharacters", "system","mathematics.injectfallbacks")
diff --git a/Master/texmf-dist/tex/context/base/math-ali.mkiv b/Master/texmf-dist/tex/context/base/math-ali.mkiv
index db960de26eb..6bfde57b6f3 100644
--- a/Master/texmf-dist/tex/context/base/math-ali.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-ali.mkiv
@@ -25,7 +25,7 @@
%D Modules may provide additional alignment features. The following
%D mechanisms are provided by the core.
-% n>1 #### needed, strange # interaction in recurse
+% n>1 ### needed, strange # interaction in recurse
\newtoks\c_math_align_a
\newtoks\c_math_align_b
@@ -39,9 +39,9 @@
\def\math_build_eqalign_step
{\ifnum\recurselevel>\plusone
%\appendtoks
- % \tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint
+ % \tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint
%\to\scratchtoks
- \scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint}%
+ \scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint}%
\fi
\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}%
\dorecurse{\numexpr\mathalignmentparameter\c!n-\plusone\relax}
@@ -56,9 +56,9 @@
{\emptyhbox
\mskip\thinmuskip
\vcenter
- {\openup\displayopenupvalue % was: \openup\jot
+ {\math_openup\displayopenupvalue % was: \openup\jot
\mathsurround\zeropoint
- \ialign{\strut\hfil$\displaystyle{##}$&$\displaystyle{{}##{}}$\hfil\crcr#1\crcr}}%
+ \ialign{\strut\hfil$\displaystyle{\alignmark\alignmark}$\aligntab$\displaystyle{{}\alignmark\alignmark{}}$\hfil\crcr#1\crcr}}%
\mskip\thinmuskip}
% preamble is scanned for tabskips so we need the span to prevent an error message
@@ -68,14 +68,14 @@
% use zeroskipplusfill
\def\math_prepare_r_eqalign_no
- {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
- \c_math_align_b{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
+ {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}%
+ \c_math_align_b{\aligntab\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}%
\ifnum\mathraggedstatus=\plusone
- \c_math_align_c{\hfil&\span\math_text_in_eqalign{##}\tabskip\zeropoint}%
+ \c_math_align_c{\hfil\aligntab\span\math_text_in_eqalign{\alignmark\alignmark}\tabskip\zeropoint}%
\else\ifnum\mathraggedstatus=\plusthree
- \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill&\span\math_text_in_eqalign{##}\tabskip\zeropoint}%
+ \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill\aligntab\span\math_text_in_eqalign{\alignmark\alignmark}\tabskip\zeropoint}%
\else
- \c_math_align_c{\hfil\tabskip\centering&\llap{\span\math_text_in_eqalign{##}}\tabskip\zeropoint}%
+ \c_math_align_c{\hfil\tabskip\centering\aligntab\llap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\zeropoint}%
\fi\fi
\global\mathnumberstatus\zerocount
\math_build_eqalign
@@ -83,15 +83,15 @@
\tabskip\centering}
\def\math_prepare_l_eqalign_no
- {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
- \c_math_align_b{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}%
+ {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}%
+ \c_math_align_b{\aligntab\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}%
% problem: number is handled after rest and so ends up in the margin
\ifnum\mathraggedstatus=\plusone
- \c_math_align_c{\hfil&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
+ \c_math_align_c{\hfil\aligntab\kern-\displaywidth\rlap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\displaywidth}%
\else\ifnum\mathraggedstatus=\plusthree
- \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill&\kern-\displaywidth\span\math_rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
+ \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill\aligntab\kern-\displaywidth\span\math_rlap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\displaywidth}%
\else
- \c_math_align_c{\hfil\tabskip\centering&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}%
+ \c_math_align_c{\hfil\tabskip\centering\aligntab\kern-\displaywidth\rlap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\displaywidth}%
\fi\fi
\global\mathnumberstatus\zerocount
\math_build_eqalign
@@ -192,7 +192,7 @@
\c_math_eqalign_column\zerocount
\processcommacommand
[\mathalignmentparameter\c!align]
- {\advance\c_math_eqalign_column\plusone\doseteqaligncolumn}% takes argument
+ {\advance\c_math_eqalign_column\plusone\math_eqalign_set_column}% takes argument
\global\c_math_eqalign_column\plusone
\dostarttagged\t!math\empty
\dostarttagged\t!mathtable\currentmathalignment
@@ -286,7 +286,7 @@
\fi
\fi}
-\def\doseteqaligncolumn#1% we could just add to the preamble (as with other alignments)
+\def\math_eqalign_set_column#1% we could just add to the preamble (as with other alignments)
{\expandafter\let\csname\??mathalignmentvariant\number\c_math_eqalign_column\expandafter\endcsname
\csname\??mathalignmentvariant\ifcsname\??mathalignmentvariant#1\endcsname#1\else\v!normal\fi\endcsname}
@@ -594,6 +594,8 @@
\setvalue {\e!stop \currentmathmatrix}{\math_matrix_stop}% no u else lookahead problem
\to \everydefinemathmatrix
+\let\math_matrix_NC\relax
+
\unexpanded\def\math_matrix_start#1%
{\begingroup
\edef\currentmathmatrix{#1}%
@@ -607,12 +609,12 @@
\math_matrix_align_method_analyze
\mathmatrixleft
\mathmatrixbox\bgroup
- \pushmacro\domatrixNC
+ \pushmacro\math_matrix_NC
\let\endmath\relax
- \def\NC{\domatrixNC}%
- \def\MC{\domatrixNC\ifmmode\else$\def\endmath{$}\fi}%
- \global\let\domatrixNC\dodomatrixNC
- \def\NR{\endmath\global\let\domatrixNC\dodomatrixNC\crcr}%
+ \def\NC{\math_matrix_NC}%
+ \def\MC{\math_matrix_NC\ifmmode\else\startimath\let\endmath\stopimath\fi}%
+ \global\let\math_matrix_NC\math_matrix_NC_indeed
+ \def\NR{\endmath\global\let\math_matrix_NC\math_matrix_NC_indeed\crcr}%
\normalbaselines
\mathsurround\zeropoint
\everycr\emptytoks
@@ -620,8 +622,8 @@
\c_math_eqalign_column\zerocount
\processcommacommand
[\mathmatrixparameter\c!align]
- {\advance\c_math_eqalign_column\plusone\doseteqaligncolumn}% was \dosetmatrixcolumn
- \scratchcounter=\ifnum\c_math_eqalign_column>\zerocount \c_math_eqalign_column \else \plusone \fi
+ {\advance\c_math_eqalign_column\plusone\math_eqalign_set_column}%
+ \scratchcounter\ifnum\c_math_eqalign_column>\zerocount \c_math_eqalign_column \else \plusone \fi
\global\c_math_eqalign_column\plusone
\math_matrix_prepare}
@@ -630,7 +632,7 @@
\mathstrut\crcr
\noalign{\kern-\baselineskip}%
\egroup
- \popmacro\domatrixNC
+ \popmacro\math_matrix_NC
\egroup
\mathmatrixright
\endgroup}
@@ -640,13 +642,13 @@
\def\math_matrix_prepare
{\c_math_align_a{\strut\math_first_in_eqalign\math_left_of_equalign\span
- \math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}%
- \c_math_align_b{&\hskip\mathmatrixparameter\c!distance
+ \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}%
+ \c_math_align_b{\aligntab\hskip\mathmatrixparameter\c!distance
\math_next_in_eqalign\math_left_of_equalign\span
- \math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}%
- \c_math_align_c{&&\hskip\mathmatrixparameter\c!distance
+ \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}%
+ \c_math_align_c{\aligntab\aligntab\hskip\mathmatrixparameter\c!distance
\math_left_of_equalign\span
- \math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}%
+ \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}%
\scratchtoks\emptytoks
\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}%
\dorecurse{\numexpr\scratchcounter-\plusone\relax}
@@ -654,8 +656,8 @@
\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_c}}%
\halign \expandafter \bgroup\the\scratchtoks \crcr}
-\unexpanded\def\dodomatrixNC
- {\gdef\domatrixNC{\endmath&}}
+\unexpanded\def\math_matrix_NC_indeed
+ {\gdef\math_matrix_NC{\endmath\aligntab}}
\installcorenamespace{mathmatrixalignmethod}
@@ -877,16 +879,16 @@
\lineskip\mathstackvgap
\lineskiplimit\lineskip
\let\stopmathmode\relax
- \def\NC{\domatrixNC}%
- \def\MC{\domatrixNC\startmathmode}%
- \global\let\domatrixNC\dodomatrixNC
+ \def\NC{\math_matrix_NC}%
+ \def\MC{\math_matrix_NC\startmathmode}%
+ \global\let\math_matrix_NC\math_matrix_NC_indeed
\def\NR
{\stopmathmode
- \global\let\domatrixNC\dodomatrixNC
+ \global\let\math_matrix_NC\math_matrix_NC_indeed
\crcr}%
\mathsurround\zeropoint
\everycr\emptytoks
- \halign\bgroup\hfil$\scriptstyle##$\hfil\crcr}
+ \halign\bgroup\hfil$\scriptstyle\alignmark\alignmark$\hfil\crcr}
\def\stopsubstack
{\crcr
@@ -978,8 +980,11 @@
\installcorenamespace{mathinnerstart}
\installcorenamespace{mathinnerstop}
-\unexpanded\def\startinnermath{\csname\??mathinnerstart\formulaparameter\c!align\endcsname}
-\unexpanded\def\stopinnermath {\csname\??mathinnerstop \formulaparameter\c!align\endcsname}
+% \unexpanded\def\startinnermath{\csname\??mathinnerstart\formulaparameter\c!align\endcsname}
+% \unexpanded\def\stopinnermath {\csname\??mathinnerstop \formulaparameter\c!align\endcsname}
+
+\unexpanded\def\startinnermath{\expandnamespaceparameter\??mathinnerstart\formulaparameter\c!align\v!normal}
+\unexpanded\def\stopinnermath {\expandnamespaceparameter\??mathinnerstop \formulaparameter\c!align\v!normal}
\unexpanded\def\mathinnerstrut
{\doif{\formulaparameter\c!strut}\v!yes\strut}
@@ -1088,6 +1093,9 @@
\defineinnermathhandler\v!flushleft {\startmathbox\plusthree}{\stopmathbox}
\defineinnermathhandler\v!center {\startmathbox\plustwo }{\stopmathbox}
\defineinnermathhandler\v!flushright{\startmathbox\plusone }{\stopmathbox}
+\defineinnermathhandler\v!normal {} {}
+
+%defineinnermathhandler\v!normal {\startmathbox\plustwo }{\stopmathbox}
%D [The examples below are in english and don't process in the
%D documentation style, which will be english some day.]
diff --git a/Master/texmf-dist/tex/context/base/math-def.mkiv b/Master/texmf-dist/tex/context/base/math-def.mkiv
index 9201fc5404a..25098695902 100644
--- a/Master/texmf-dist/tex/context/base/math-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-def.mkiv
@@ -15,7 +15,7 @@
\unprotect
-% this will be done at the lua end
+%D Some day this will be done at the lua end.
\startluacode
mathematics.define(\number\defaultmathfamily)
@@ -24,22 +24,8 @@
\activatemathcharacters
-% will go to math-ext (if used at all)
-
-\Umathchardef\braceld=0 \defaultmathfamily "FF07A
-\Umathchardef\bracerd=0 \defaultmathfamily "FF07B
-\Umathchardef\bracelu=0 \defaultmathfamily "FF07C
-\Umathchardef\braceru=0 \defaultmathfamily "FF07D
-
-% ctx specific
-
-% \bgroup
-% \catcode`|=\othercatcode
-% \global\let\|=|
-% \egroup
-
-% The \mfunction macro is an alternative for \hbox with a
-% controlable font switch.
+%D The \mfunction macro is an alternative to \hbox with a controllable font
+%D switch.
\definemathcommand [arccos] [nolop] {\mfunctionlabeltext{arccos}}
\definemathcommand [arcsin] [nolop] {\mfunctionlabeltext{arcsin}}
@@ -69,7 +55,7 @@
\definemathcommand [max] [limop] {\mfunctionlabeltext{max}}
\definemathcommand [min] [limop] {\mfunctionlabeltext{min}}
\definemathcommand [mod] [limop] {\mfunctionlabeltext{mod}}
-%definemathcommand [div] [limop] {\mfunctionlabeltext{div}} % overloads \div symbol
+%definemathcommand [div] [limop] {\mfunctionlabeltext{div}} % overloads \div symbol
\definemathcommand [projlim] [limop] {\mfunctionlabeltext{projlim}}
\definemathcommand [Pr] [limop] {\mfunctionlabeltext{Pr}}
\definemathcommand [sec] [nolop] {\mfunctionlabeltext{sec}}
@@ -79,157 +65,23 @@
\definemathcommand [tanh] [nolop] {\mfunctionlabeltext{tanh}}
\definemathcommand [tan] [nolop] {\mfunctionlabeltext{tan}}
-% \definemathcommand [integers] {{\mathblackboard Z}}
-% \definemathcommand [reals] {{\mathblackboard R}}
-% \definemathcommand [rationals] {{\mathblackboard Q}}
-% \definemathcommand [naturalnumbers]{{\mathblackboard N}}
-% \definemathcommand [complexes] {{\mathblackboard C}}
-% \definemathcommand [primes] {{\mathblackboard P}}
-
\let\normalmatharg\arg % todo: maybe automatically
-% using attributes
-
-\setnewconstant\bigmathdelimitermethod\plusone
-
-\def\plainbigdelimiters % traditional method
- {\bigmathdelimitermethod\plustwo}
-
-\plainbigdelimiters % is default for the moment but not so nice
-
-\def\doplainbigmath#1#2%
- {{\hbox{$%
- \nulldelimiterspace\zeropoint\relax
- \mathsurround\zeropoint
- $}}}
-
-\def\doleftbigmath #1{\ifx#1\relax\else\left#1\expandafter\doleftbigmath \fi}
-\def\dorightbigmath#1{\ifx#1\relax\else\right.\expandafter\dorightbigmath\fi}
-
-\installcorenamespace{mathbig}
-
-\unexpanded\def\choosemathbig#1#2% so we accent \big{||} as well
- {{\hbox{$%
- \ifcase\bigmathdelimitermethod
- \doleftbigmath#2\relax
- \dorightbigmath#2\relax
- \or
- \attribute\mathsizeattribute#1\relax
- \doleftbigmath#2\relax
- \dorightbigmath#2\relax
- \else
- \doleftbigmath#2\relax
- \vbox to\getvalue{\??mathbig\number#1}\bodyfontsize{}%
- \dorightbigmath#2\relax
- \fi
- \nulldelimiterspace\zeropoint\relax
- \mathsurround\zeropoint
- $}}}
-
-\definemathcommand [big] {\choosemathbig\plusone } \setvalue{\??mathbig1}{0.85}
-\definemathcommand [Big] {\choosemathbig\plustwo } \setvalue{\??mathbig2}{1.15}
-\definemathcommand [bigg] {\choosemathbig\plusthree} \setvalue{\??mathbig3}{1.45}
-\definemathcommand [Bigg] {\choosemathbig\plusfour } \setvalue{\??mathbig4}{1.75}
-
-\definemathcommand [bigl] [open] [one] {\big}
-\definemathcommand [bigm] [rel] [one] {\big}
-\definemathcommand [bigr] [close] [one] {\big}
-\definemathcommand [Bigl] [open] [one] {\Big}
-\definemathcommand [Bigm] [rel] [one] {\Big}
-\definemathcommand [Bigr] [close] [one] {\Big}
-\definemathcommand [biggl] [open] [one] {\bigg}
-\definemathcommand [biggm] [rel] [one] {\bigg}
-\definemathcommand [biggr] [close] [one] {\bigg}
-\definemathcommand [Biggl] [open] [one] {\Bigg}
-\definemathcommand [Biggm] [rel] [one] {\Bigg}
-\definemathcommand [Biggr] [close] [one] {\Bigg}
-
%D This needs checking:
-\def\setoperatorlimits#1#2% operator limits
+\unexpanded\def\setoperatorlimits#1#2% operator limits
{\savenormalmeaning{#1}%
- \def#1{\csname normal\strippedcsname#1\endcsname#2}}
-
-\setoperatorlimits\int \intlimits
-\setoperatorlimits\iint \intlimits
-\setoperatorlimits\iiint \intlimits
-\setoperatorlimits\oint \intlimits
-\setoperatorlimits\oiint \intlimits
-\setoperatorlimits\oiiint \intlimits
-\setoperatorlimits\intclockwise \intlimits
-\setoperatorlimits\ointclockwise \intlimits
-\setoperatorlimits\ointctrclockwise \intlimits
-
-%D This is a temporary hack until we figure out how to do this correctly,
-%D preferably using math parameters but we can also consider doing some
-%D node juggling here.
-
-\unexpanded\def\implies {\mathrel{\;\Longrightarrow\;}}
-\unexpanded\def\impliedby{\mathrel{\;\Longleftarrow\;}}
-\unexpanded\def\And {\mathrel{\;\internalAnd\;}}
-%unexpanded\def\iff {\;\Longleftrightarrow\;}
-\setuvalue {iff}{\;\Longleftrightarrow\;} % nicer for if checker
-
-% todo: virtual in math-vfu
-
-% \definemathcommand [mapsto] {\mapstochar\rightarrow}
-% \definemathcommand [hookrightarrow] {\lhook\joinrel\rightarrow}
-% \definemathcommand [hookleftarrow] {\leftarrow\joinrel\rhook}
-% \definemathcommand [bowtie] {\mathrel\triangleright\joinrel\mathrel\triangleleft}
-% \definemathcommand [models] {\mathrel|\joinrel=}
-% \definemathcommand [iff] {\;\Longleftrightarrow\;}
-
-% hm
-
-% ldots = 2026
-% vdots = 22EE
-% cdots = 22EF
-% ddots = 22F1
-% udots = 22F0
-
-% \def\PLAINldots{\ldotp\ldotp\ldotp}
-% \def\PLAINcdots{\cdotp\cdotp\cdotp}
-
-% \def\PLAINvdots
-% {\vbox{\baselineskip.4\bodyfontsize\lineskiplimit\zeropoint\kern.6\bodyfontsize\hbox{.}\hbox{.}\hbox{.}}}
-
-% \def\PLAINddots
-% {\mkern1mu%
-% \raise.7\bodyfontsize\vbox{\kern.7\bodyfontsize\hbox{.}}%
-% \mkern2mu%
-% \raise.4\bodyfontsize\relax\hbox{.}%
-% \mkern2mu%
-% \raise.1\bodyfontsize\hbox{.}%
-% \mkern1mu}
-
-% \definemathcommand [ldots] [inner] {\PLAINldots}
-% \definemathcommand [cdots] [inner] {\PLAINcdots}
-% \definemathcommand [vdots] [nothing] {\PLAINvdots}
-% \definemathcommand [ddots] [inner] {\PLAINddots}
-
-%D \starttyping
-%D $\sqrt[3]{10}$
-%D \stoptyping
-
-\def\rootradical{\Uroot \defaultmathfamily "221A } % can be done in char-def
-
-\def\root#1\of{\rootradical{#1}} % #2
-
-\unexpanded\def\sqrt{\doifnextoptionalelse\rootwithdegree\rootwithoutdegree}
-
-\def\rootwithdegree [#1]{\rootradical{#1}}
-\def\rootwithoutdegree {\rootradical {}}
-
-\definemathcommand [mathstrut] {\vphantom{(}}
-\definemathcommand [joinrel] {\mathrel{\mkern-3mu}}
-
-\unexpanded\def\{{\mathortext\lbrace\letterleftbrace } % or maybe a chardef
-\unexpanded\def\}{\mathortext\rbrace\letterrightbrace} % or maybe a chardef
-\unexpanded\def\|{\mathortext\vert \letterbar } % or maybe a chardef
-
-%D The following colon related definitions are provided by Aditya
-%D Mahajan who derived them from \type {mathtools.sty} and \type
-%D {colonequals.sty}.
+ \expandafter\def\expandafter#1\expandafter{\csname normal\strippedcsname#1\endcsname#2}}
+
+\setoperatorlimits \int \intlimits
+\setoperatorlimits \iint \intlimits
+\setoperatorlimits \iiint \intlimits
+\setoperatorlimits \oint \intlimits
+\setoperatorlimits \oiint \intlimits
+\setoperatorlimits \oiiint \intlimits
+\setoperatorlimits \intclockwise \intlimits
+\setoperatorlimits \ointclockwise \intlimits
+\setoperatorlimits \ointctrclockwise \intlimits
%D \macros
%D {centercolon, colonminus, minuscolon, colonequals, equalscolon,
@@ -238,6 +90,11 @@
%D equalscoloncolon, coloncolonapprox, approxcoloncolon,
%D colonsim, simcoloncolon}
%D
+%D The following colon related definitions are provided by Aditya
+%D Mahajan who derived them from \type {mathtools.sty} and \type
+%D {colonequals.sty}. This will be redone as part of the overhaul
+%D and font updates.
+%D
%D In $a := b$ the colon is not vertically centered with the equal
%D to. Also the distance between colon and equal to is a bit large.
%D So, we define a vertically centered colon \tex {centercolon} and
@@ -263,16 +120,13 @@
%D \formula {A \colonsim B}
%D \formula {A \simcoloncolon B}
%D \stoplines
-
+%D
%D The next macros take care of the space between the colon and the
%D relation symbol.
\definemathcommand [colonsep] {\mkern-1.2mu}
\definemathcommand [doublecolonsep] {\mkern-0.9mu}
-%D Now we define all the colon relations .. needs checking with char-def.lua ...
-%d will move to a separate module.
-
\definemathcommand [centercolon] [rel] {\mathstylevcenteredhbox\colon}
\definemathcommand [colonminus] [rel] {\centercolon\colonsep\mathrel{-}}
\definemathcommand [minuscolon] [rel] {\mathrel{-}\colonsep\centercolon} % native char
@@ -293,93 +147,6 @@
\definemathcommand [colonsim] [rel] {\coloncolon\colonsep\sim}
\definemathcommand [simcoloncolon] [rel] {\sim\coloncolon\colonsep}
-%D Goodies. We might move this elsewhere.
-
-% Be careful in choosing what accents you take (the code below uses a
-% combining one):
-%
-% \startbuffer
-% % $\Umathaccent top 0 0 "20D7 {example}$
-% % $\Umathaccent top fixed 0 0 "20D7 {example}$
-% $\Umathaccent 0 0 "20D7 {example}$
-% $\Umathaccent fixed 0 0 "20D7 {example}$
-% $\Umathaccent bottom 0 0 "20D7 {example}$
-% $\Umathaccent bottom fixed 0 0 "20D7 {example}$
-% $\Umathaccent both 0 0 "20D7
-% 0 0 "20D7 {example}$
-% $\Umathaccent both fixed 0 0 "20D7
-% fixed 0 0 "20D7 {example}$
-% $\Umathaccent both 0 0 "20D7
-% fixed 0 0 "20D7 {example}$
-% $\Umathaccent both fixed 0 0 "20D7
-% 0 0 "20D7 {example}$
-% \stopbuffer
-%
-% \setupbodyfont[modern] \getbuffer
-% \setupbodyfont[xits] \getbuffer
-% \setupbodyfont[cambria] \getbuffer
-
-\unexpanded\def\underleftarrow #1{\mathop{\Uunderdelimiter \defaultmathfamily "2190 {#1}}}
-\unexpanded\def\overleftarrow #1{\mathop{\Uoverdelimiter \defaultmathfamily "2190 {#1}}}
-\unexpanded\def\underrightarrow#1{\mathop{\Uunderdelimiter \defaultmathfamily "2192 {#1}}}
-\unexpanded\def\overrightarrow #1{\mathop{\Uoverdelimiter \defaultmathfamily "2192 {#1}}}
-
-% watch out: here we have a class (zero):
-
-\unexpanded\def\normaldoublebrace {\Umathaccents 0 \defaultmathfamily "23DE 0 \defaultmathfamily "23DF }
-\unexpanded\def\normaldoubleparent{\Umathaccents 0 \defaultmathfamily "23DC 0 \defaultmathfamily "23DD }
-
-% let's keep this
-
-\def\Umathbotaccent{\Umathaccent \s!bottom }
-\def\Umathaccents {\Umathaccent \s!both }
-
-\let\normaloverbrace \overbrace
-\let\normalunderbrace \underbrace
-\let\normaloverparent \overparent
-\let\normalunderparent \underparent
-\let\normaloverbracket \overbracket
-\let\normalunderbracket \underbracket
-\let\normalunderleftarrow \underleftarrow
-\let\normaloverleftarrow \overleftarrow
-\let\normalunderrightarrow\underrightarrow
-\let\normaloverrightarrow \overrightarrow
-
-\unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
-\unexpanded\def\stackrel #1#2{\mathrel{\mathop{#2}\limits^{#1}}}
-
-\unexpanded\def\overbrace {\mathopwithlimits\normaloverbrace }
-\unexpanded\def\underbrace {\mathopwithlimits\normalunderbrace }
-\unexpanded\def\doublebrace {\mathopwithlimits\normaldoublebrace }
-\unexpanded\def\overparent {\mathopwithlimits\normaloverparent }
-\unexpanded\def\underparent {\mathopwithlimits\normalunderparent }
-\unexpanded\def\overbracket {\mathopwithlimits\normaloverbracket }
-\unexpanded\def\underbracket {\mathopwithlimits\normalunderbracket }
-\unexpanded\def\doubleparent {\mathopwithlimits\normaldoubleparent }
-\unexpanded\def\underleftarrow {\mathopwithlimits\normalunderleftarrow }
-\unexpanded\def\overleftarrow {\mathopwithlimits\normaloverleftarrow }
-\unexpanded\def\underrightarrow{\mathopwithlimits\normalunderrightarrow}
-\unexpanded\def\overrightarrow {\mathopwithlimits\normaloverrightarrow }
-
-\let\lceil \lceiling
-\let\rceil \rceiling
-
-\let\normalsurd\surd
-
-\unexpanded\def\surd{\normalsurd{}}
-
-% Some special characters:
-
-\unexpanded\def\nabla{∇} % this one adapts
-
-%
-% todo mathclass=punctuation ord
-
-% \Umathcode"02C="6 \defaultmathfamily "02C
-% \Umathcode"02E="0 \defaultmathfamily "02E
-
-% tricky .. todo
-
% \appendtoks
% \def\over{\primitive\over}%
% \to \everymathematics
diff --git a/Master/texmf-dist/tex/context/base/math-del.mkiv b/Master/texmf-dist/tex/context/base/math-del.mkiv
index 64657281810..1245d31de3e 100644
--- a/Master/texmf-dist/tex/context/base/math-del.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-del.mkiv
@@ -61,7 +61,10 @@
\unexpanded\def\enablefiller {\let\normalorfiller\secondoftwoarguments}
\unexpanded\def\disablefiller {\let\normalorfiller\firstoftwoarguments}
-\def\mathopnolimits#1{\mathop{\mr#1}\nolimits} % was \rm, which follows text fonts (used in mml parser)
-\def\mathopdolimits#1{\mathop{\mr#1}} % was \rm, which follows text fonts (used in mml parser)
+% \def\mathopnolimits#1{\mathop{\mr\mathrm#1}\nolimits} % was \rm, which follows text fonts (used in mml parser)
+% \def\mathopdolimits#1{\mathop{\mr\mathrm#1}} % was \rm, which follows text fonts (used in mml parser)
+
+\def\mathopnolimits#1{\mathop{\mathrm#1}\nolimits} % was \rm, which follows text fonts (used in mml parser, check!)
+\def\mathopdolimits#1{\mathop{\mathrm#1}} % was \rm, which follows text fonts (used in mml parser, check!)
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-dir.lua b/Master/texmf-dist/tex/context/base/math-dir.lua
new file mode 100644
index 00000000000..bcc5461e976
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-dir.lua
@@ -0,0 +1,166 @@
+if not modules then modules = { } end modules ['math-dir'] = {
+ version = 1.001,
+ comment = "companion to typo-dir.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- While wrapping up the updated math support (for CTX/TUG 2013) I wondered about numbers in
+-- r2l math mode. Googling led me to TUGboat, Volume 25 (2004), No. 2, where the numbers run
+-- from left to right. That makes me wonder how far we should go, and as I was looking into
+-- bidi anyway, it's a nice distraction.
+--
+-- I first tried to hook something into noads but that gets pretty messy due to the indirect
+-- char noads. If needed, I'll do it that way after all. With regard to spacing: as we can
+-- assume that only numbers are involved, we can safely swap them, and the same holds for
+-- mirroring. Anyway, I'm not too happy with this solution, so eventually I'll do something
+-- with noads instead (as an alternative method). Yet another heuristic approach.
+
+local nodes, node = nodes, node
+
+local trace_directions = false trackers.register("typesetters.directions.math", function(v) trace_directions = v end)
+
+local report_directions = logs.reporter("typesetting","math directions")
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getlist = nuts.getlist
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local nodepool = nuts.pool
+
+local new_textdir = nodepool.textdir
+
+local chardirections = characters.directions
+local charmirrors = characters.mirrors
+local charclasses = characters.textclasses
+
+local directions = typesetters.directions or { }
+
+local a_mathbidi = attributes.private('mathbidi')
+
+local function processmath(head)
+ local current = head
+ local done = false
+ local start = nil
+ local stop = nil
+ local function capsulate()
+ head = insert_node_before(head,start,new_textdir("+TLT"))
+ insert_node_after(head,stop,new_textdir("-TLT"))
+ if trace_directions then
+ report_directions("reversed: %s",nodes.listtoutf(start,false,false,stop))
+ end
+ done = true
+ start = false
+ stop = nil
+ end
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local char = getchar(current)
+ local cdir = chardirections[char]
+ if cdir == "en" or cdir == "an" then -- we could check for mathclass punctuation
+ if not start then
+ start = current
+ end
+ stop = current
+ else
+ if not start then
+ -- nothing
+ elseif start == stop then
+ start = nil
+ else
+ capsulate()
+ end
+ if cdir == "on" then
+ local mirror = charmirrors[char]
+ if mirror then
+ local class = charclasses[char]
+ if class == "open" or class == "close" then
+ setfield(current,"char",mirror)
+ if trace_directions then
+ report_directions("mirrored: %C to %C",char,mirror)
+ end
+ done = true
+ end
+ end
+ end
+ end
+ elseif not start then
+ -- nothing
+            if id == hlist_code or id == vlist_code then
+                local list, d = processmath(getlist(current))
+                setfield(current,"list",list)
+                if d then
+                    done = true
+                end
+            end
+ elseif start == stop then
+ start = nil
+ else
+            capsulate()
+ -- math can pack things into hlists .. we need to make sure we don't process
+ -- too often: needs checking
+ if id == hlist_code or id == vlist_code then
+ local list, d = processmath(getlist(current))
+ setfield(current,"list",list)
+ if d then
+ done = true
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ if not start then
+ -- nothing
+ elseif start == stop then
+ -- nothing
+ else
+ capsulate()
+ end
+ return head, done
+end
+
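+-- A sketch of what the function above produces (assuming a right-to-left math list): a run
+-- of two or more digits of the "en" or "an" class, say "12", is isolated as
+--
+--   [textdir +TLT] [glyph "1"] [glyph "2"] [textdir -TLT]
+--
+-- so the digits keep their left-to-right order; a single digit is left alone, and mirrorable
+-- "on" class characters of the open or close text class get their mirrored counterpart.
+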
+local enabled = false
+
+function directions.processmath(head) -- style, penalties
+ if enabled then
+ local h = tonut(head)
+ local a = getattr(h,a_mathbidi)
+ if a and a > 0 then
+ local head, done = processmath(h)
+ return tonode(head), done
+ end
+ end
+ return head, false
+end
+
+function directions.setmath(n)
+ if not enabled and n and n > 0 then
+ if trace_directions then
+ report_directions("enabling directions handler")
+ end
+ tasks.enableaction("math","typesetters.directions.processmath")
+ enabled = true
+ end
+end
+
+commands.setmathdirection = directions.setmath
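+
+-- Enabling is a one-shot operation: the first call with a positive value enables the handler
+-- in the "math" task list, and processmath then only acts on lists carrying a positive
+-- mathbidi attribute. A minimal sketch from the Lua end (in practice this is driven from the
+-- TeX interface, which is not shown here):
+--
+--   commands.setmathdirection(1) -- enables typesetters.directions.processmath once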
diff --git a/Master/texmf-dist/tex/context/base/math-fbk.lua b/Master/texmf-dist/tex/context/base/math-fbk.lua
index eebc4e4e7d6..70a8ae8d61c 100644
--- a/Master/texmf-dist/tex/context/base/math-fbk.lua
+++ b/Master/texmf-dist/tex/context/base/math-fbk.lua
@@ -6,17 +6,21 @@ if not modules then modules = { } end modules ['math-fbk'] = {
license = "see context related readme files"
}
-local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end)
+local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end)
-local report_fallbacks = logs.reporter("math","fallbacks")
+local report_fallbacks = logs.reporter("math","fallbacks")
-local fallbacks = { }
-mathematics.fallbacks = fallbacks
+local formatters = string.formatters
+local fastcopy = table.fastcopy
+
+local fallbacks = { }
+mathematics.fallbacks = fallbacks
local virtualcharacters = { }
-local identifiers = fonts.hashes.identifiers
-local lastmathids = fonts.hashes.lastmathids
+local identifiers = fonts.hashes.identifiers
+local lastmathids = fonts.hashes.lastmathids
+local tounicode16 = fonts.mappings.tounicode16
-- we need a trick (todo): if we define scriptscript, script and text in
-- that order we could use their id's .. i.e. we could always add a font
@@ -25,6 +29,11 @@ local lastmathids = fonts.hashes.lastmathids
--
-- todo: use index 'true when luatex provides that feature (on the agenda)
+-- to be considered:
+--
+-- in luatex provide reserve_id (and pass id as field of tfmdata)
+-- in context define three sizes but pass them later i.e. do virtualize afterwards
+
function fallbacks.apply(target,original)
local mathparameters = target.mathparameters -- why not hasmath
if mathparameters then
@@ -39,7 +48,7 @@ function fallbacks.apply(target,original)
end
-- This is not okay yet ... we have no proper way to refer to 'self'
-- otherwise I will make my own id allocator).
-local self = #usedfonts == 0 and font.nextid() or nil -- will be true
+ local self = #usedfonts == 0 and font.nextid() or nil -- will be true
local textid, scriptid, scriptscriptid
local textindex, scriptindex, scriptscriptindex
local textdata, scriptdata, scriptscriptdata
@@ -48,26 +57,27 @@ local self = #usedfonts == 0 and font.nextid() or nil -- will be true
-- textid = nil -- self
-- scriptid = nil -- no smaller
-- scriptscriptid = nil -- no smaller
-textid = self
-scriptid = self
-scriptscriptid = self
+ textid = self
+ scriptid = self
+ scriptscriptid = self
elseif mathsize == 2 then
-- scriptsize
-- textid = nil -- self
-textid = self
+ textid = self
scriptid = lastmathids[3]
scriptscriptid = lastmathids[3]
else
-- textsize
-- textid = nil -- self
-textid = self
+ textid = self
scriptid = lastmathids[2]
scriptscriptid = lastmathids[3]
end
if textid then
textindex = #usedfonts + 1
usedfonts[textindex] = { id = textid }
- textdata = identifiers[textid]
+-- textdata = identifiers[textid] or target
+ textdata = target
else
textdata = target
end
@@ -87,8 +97,7 @@ textid = self
scriptscriptindex = scriptindex
scriptscriptdata = scriptdata
end
--- report_fallbacks("used textid: %s, used script id: %s, used scriptscript id: %s",
--- tostring(textid),tostring(scriptid),tostring(scriptscriptid))
+ -- report_fallbacks("used textid: %S, used script id: %S, used scriptscript id: %S",textid,scriptid,scriptscriptid)
local data = {
textdata = textdata,
scriptdata = scriptdata,
@@ -96,6 +105,9 @@ textid = self
textindex = textindex,
scriptindex = scriptindex,
scriptscriptindex = scriptscriptindex,
+ textid = textid,
+ scriptid = scriptid,
+ scriptscriptid = scriptscriptid,
characters = characters,
unicode = k,
target = target,
@@ -103,24 +115,30 @@ textid = self
size = size,
mathsize = mathsize,
}
--- inspect(usedfonts)
+ target.mathrelation = data
+ -- inspect(usedfonts)
for k, v in next, virtualcharacters do
if not characters[k] then
local tv = type(v)
+ local cd = nil
if tv == "table" then
- characters[k] = v
+ cd = v
elseif tv == "number" then
- characters[k] = characters[v]
+ cd = characters[v]
elseif tv == "function" then
- characters[k] = v(data)
+ cd = v(data)
end
- if trace_fallbacks then
- if characters[k] then
- report_fallbacks("extending font %a with %U",target.properties.fullname,k)
- end
+ if cd then
+ characters[k] = cd
+ else
+ -- something else
+ end
+ if trace_fallbacks and characters[k] then
+ report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
end
end
end
+ data.unicode = nil
end
end
@@ -162,12 +180,12 @@ end
-- virtualcharacters[0x208B] = 0x002B
virtualcharacters[0x207A] = function(data)
- data.replacement = 0x2212
+ data.replacement = 0x002B
return raised(data)
end
virtualcharacters[0x207B] = function(data)
- data.replacement = 0x002B
+ data.replacement = 0x2212
return raised(data)
end
@@ -310,3 +328,209 @@ virtualcharacters[0xFE352] = function(data)
end
end
+-- we could move the defs from math-act here
+
+local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset)
+ local characters = target.characters
+ local olddata = characters[oldchr]
+ -- brrr ... pagella has only next
+ if olddata and not olddata.commands and olddata.width > 0 then
+ local addprivate = fonts.helpers.addprivate
+ if swap then
+ swap = characters[swap]
+ height = swap.depth
+ depth = 0
+ else
+ height = height or 0
+ depth = depth or 0
+ end
+ local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height + (offset or 0)}
+ local newdata = {
+ commands = { correction, { "slot", 1, oldchr } },
+ width = olddata.width,
+ height = height,
+ depth = depth,
+ }
+ local glyphdata = newdata
+ local nextglyph = olddata.next
+ while nextglyph do
+ local oldnextdata = characters[nextglyph]
+ if oldnextdata then
+ local newnextdata = {
+ commands = { correction, { "slot", 1, nextglyph } },
+ width = oldnextdata.width,
+ height = height,
+ depth = depth,
+ }
+ local newnextglyph = addprivate(target,formatters["M-N-%H"](nextglyph),newnextdata)
+ newdata.next = newnextglyph
+ local nextnextglyph = oldnextdata.next
+ if nextnextglyph == nextglyph then
+ break
+ else
+ olddata = oldnextdata
+ newdata = newnextdata
+ nextglyph = nextnextglyph
+ end
+ else
+ report_fallbacks("error in fallback: no valid next, slot %X",nextglyph)
+ break
+ end
+ end
+ local hv = olddata.horiz_variants
+ if hv then
+ hv = fastcopy(hv)
+ newdata.horiz_variants = hv
+ for i=1,#hv do
+ local hvi = hv[i]
+ local oldglyph = hvi.glyph
+ local olddata = characters[oldglyph]
+ if olddata then
+ local newdata = {
+ commands = { correction, { "slot", 1, oldglyph } },
+ width = olddata.width,
+ height = height,
+ depth = depth,
+ }
+ hvi.glyph = addprivate(target,formatters["M-H-%H"](oldglyph),newdata)
+ else
+ report_fallbacks("error in fallback: no valid horiz_variants, slot %X, index %i",oldglyph,i)
+ end
+ end
+ end
+ return glyphdata, true
+ else
+ return olddata, false
+ end
+end
+
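+-- The helper above hands back an ordinary virtual character description, roughly of this
+-- form (the actual values depend on the font, so this is only an illustration):
+--
+--   { commands       = { { "down", shift }, { "slot", 1, oldchr } },
+--     width          = <width of oldchr>,
+--     height         = <requested height>,
+--     depth          = <requested depth>,
+--     next           = <private copy of the next-in-size glyph>,
+--     horiz_variants = { ... each variant treated the same way ... } }
+--
+-- plus a boolean telling whether anything was constructed at all.
+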
+virtualcharacters[0x203E] = function(data) -- could be FE33E instead
+ local target = data.target
+ local height, depth = 0, 0
+ local mathparameters = target.mathparameters
+ if mathparameters then
+ height = mathparameters.OverbarVerticalGap
+ depth = mathparameters.UnderbarVerticalGap
+ else
+ height = target.parameters.xheight/4
+ depth = height
+ end
+ return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth)
+end
+
+virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient
+virtualcharacters[0xFE33F] = virtualcharacters[0x203E] -- convenient
+
+local function smashed(data,unicode,swap,private)
+ local target = data.target
+ local original = data.original
+ local chardata = target.characters[unicode]
+ if chardata and chardata.height > target.parameters.xheight then
+ return accent_to_extensible(target,private,original,unicode,0,0,swap)
+ else
+ return original.characters[unicode]
+ end
+end
+
+addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h" } )
+
+virtualcharacters[0xFE3DE] = function(data) return smashed(data,0x23DE,0x23DF,0xFE3DE) end
+virtualcharacters[0xFE3DC] = function(data) return smashed(data,0x23DC,0x23DD,0xFE3DC) end
+virtualcharacters[0xFE3B4] = function(data) return smashed(data,0x23B4,0x23B5,0xFE3B4) end
+
+addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } )
+
+virtualcharacters[0xFE3DF] = function(data) return data.target.characters[0x23DF] end
+virtualcharacters[0xFE3DD] = function(data) return data.target.characters[0x23DD] end
+virtualcharacters[0xFE3B5] = function(data) return data.target.characters[0x23B5] end
+
+-- todo: add some more .. numbers might change
+
+addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h" } )
+addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h" } )
+
+local function smashed(data,unicode,private)
+ local target = data.target
+ local height = target.parameters.xheight / 2
+ local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
+ if done then
+ c.top_accent = nil -- or maybe also all the others
+ end
+ return c
+end
+
+virtualcharacters[0xFE302] = function(data) return smashed(data,0x0302,0xFE302) end
+virtualcharacters[0xFE303] = function(data) return smashed(data,0x0303,0xFE303) end
+
+-- another crazy hack .. it doesn't work as we define scriptscript first .. so for the
+-- moment we have smaller primes; big ones will become an option
+
+local function smashed(data,unicode,optional)
+ local oldchar = data.characters[unicode]
+ if oldchar then
+ local height = 1.2 * data.target.parameters.xheight
+ local newchar = {
+ commands = {
+ { "down", oldchar.height - height },
+ { "char", unicode },
+ },
+ height = height,
+ width = oldchar.width,
+ }
+ return newchar
+ elseif not optional then
+ report_fallbacks("missing %U prime in font %a",unicode,data.target.properties.fullname)
+ end
+end
+
+addextra(0xFE932, { description="SMASHED PRIME 0x02032", unicodeslot=0xFE932 } )
+addextra(0xFE933, { description="SMASHED PRIME 0x02033", unicodeslot=0xFE933 } )
+addextra(0xFE934, { description="SMASHED PRIME 0x02034", unicodeslot=0xFE934 } )
+addextra(0xFE957, { description="SMASHED PRIME 0x02057", unicodeslot=0xFE957 } )
+
+addextra(0xFE935, { description="SMASHED BACKWARD PRIME 0x02035", unicodeslot=0xFE935 } )
+addextra(0xFE936, { description="SMASHED BACKWARD PRIME 0x02036", unicodeslot=0xFE936 } )
+addextra(0xFE937, { description="SMASHED BACKWARD PRIME 0x02037", unicodeslot=0xFE937 } )
+
+virtualcharacters[0xFE932] = function(data) return smashed(data,0x02032) end
+virtualcharacters[0xFE933] = function(data) return smashed(data,0x02033) end
+virtualcharacters[0xFE934] = function(data) return smashed(data,0x02034) end
+virtualcharacters[0xFE957] = function(data) return smashed(data,0x02057) end
+
+virtualcharacters[0xFE935] = function(data) return smashed(data,0x02035,true) end
+virtualcharacters[0xFE936] = function(data) return smashed(data,0x02036,true) end
+virtualcharacters[0xFE937] = function(data) return smashed(data,0x02037,true) end
+
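+-- The smashed primes above are plain down-shifted copies: the new height is fixed at
+-- 1.2 * xheight and the glyph is moved down by (old height - new height). With hypothetical
+-- values, a prime of height 700000sp in a font with an xheight of 430000sp is shifted down
+-- by 700000 - 1.2 * 430000 = 184000sp, while its width is kept.
+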
+-- actuarian (beware: xits has an ugly one)
+
+addextra(0xFE940, { category = "mn", description="SMALL ANNUITY SYMBOL", unicodeslot=0xFE940, mathclass="topaccent", mathname="smallactuarial" })
+
+local function actuarian(data)
+ local characters = data.target.characters
+ local parameters = data.target.parameters
+ local basechar = characters[0x0078] -- x (0x0058 X) or 0x1D431
+ local linewidth = parameters.xheight / 10
+ local basewidth = basechar.width
+ local baseheight = basechar.height
+ return {
+ -- todo: add alttext
+ -- compromise: lm has large hooks e.g. \actuarial{a}
+ width = basewidth + 4 * linewidth,
+ tounicode = tounicode16(0x20E7),
+ commands = {
+ { "right", 2 * linewidth },
+ { "down", - baseheight - 3 * linewidth },
+ { "rule", linewidth, basewidth + 4 * linewidth },
+ { "right", -linewidth },
+ { "down", baseheight + 4 * linewidth },
+ { "rule", baseheight + 5 * linewidth, linewidth },
+ },
+ }
+end
+
+virtualcharacters[0x020E7] = actuarian -- checked
+virtualcharacters[0xFE940] = actuarian -- unchecked
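+
+-- The actuarial angle is drawn rather than taken from the font: relative to the width and
+-- height of the font's "x", a horizontal rule is placed above the base and a vertical rule
+-- to its right, both one tenth of the xheight thick, and the advance width grows by four
+-- such line widths. Usage is the same as for a real combining 0x20E7 (for instance the
+-- \actuarial{a} mentioned above); like all virtualcharacters entries it only kicks in when
+-- the font itself lacks the character.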
diff --git a/Master/texmf-dist/tex/context/base/math-fen.mkiv b/Master/texmf-dist/tex/context/base/math-fen.mkiv
index f7edc582e32..33afbf675ae 100644
--- a/Master/texmf-dist/tex/context/base/math-fen.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-fen.mkiv
@@ -32,18 +32,15 @@
% test $a\fancybracket{\frac{1}{b}}c$ test \par
\installcorenamespace{mathfences}
-\installcorenamespace{mathfencesleft}
-\installcorenamespace{mathfencesmiddle}
-\installcorenamespace{mathfencesright}
\installcommandhandler \??mathfences {mathfence} \??mathfences
\let\setupmathfences\setupmathfence
\setupmathfences
- [\c!left=0x2E,
- \c!right=0x2E,
- \c!middle=0x2E,
+ [\c!left=,
+ \c!right=,
+ \c!middle=,
\c!mathstyle=,
\c!color=,
\c!command=]
@@ -55,40 +52,61 @@
\fi
\to \everydefinemathfence
-\def\math_fenced_left {\normalleft \utfchar{\mathfenceparameter\c!left }} % no Uchar here
-\def\math_fenced_middle{\normalmiddle\utfchar{\mathfenceparameter\c!middle}} % no Uchar here
-\def\math_fenced_right {\normalright \utfchar{\mathfenceparameter\c!right }} % no Uchar here
+% we need the direct use of \Udelimiter because of { etc
+
+\def\math_fenced_left {\edef\p_left{\mathfenceparameter\c!left}%
+ \math_fenced_color_push
+ \normalleft\ifx\p_left\empty.\else\Udelimiter\plusfour\fam\p_left\relax\fi
+ \math_fenced_color_pop}
+\def\math_fenced_middle{\edef\p_middle{\mathfenceparameter\c!middle}%
+ \mskip\thinmuskip
+ \math_fenced_color_push
+ \normalmiddle\ifx\p_middle\empty.\else\Udelimiter\plusfour\fam\p_middle\relax\fi
+ \math_fenced_color_pop
+ \mskip\thinmuskip}
+\def\math_fenced_right {\edef\p_right{\mathfenceparameter\c!right}%
+ \math_fenced_color_push
+ \normalright\ifx\p_right\empty.\else\Udelimiter\plusfive\fam\p_right\relax\fi
+ \math_fenced_color_pop}
+
+\def\math_fenced_color_do_push{\pushcolor[\p_math_fenced_color]}
+\let\math_fenced_color_do_pop \popcolor
+
+\let\math_fenced_color_push\donothing
+\let\math_fenced_color_pop \donothing
\let\fence \relax
\let\fenced\relax
-\unexpanded\def\math_fenced_fenced[#1]%
- {\begingroup
+\newcount\c_math_fenced_nesting
+
+\unexpanded\def\math_fenced_fenced_start#1%
+ {\advance\c_math_fenced_nesting\plusone
+ \begingroup
\edef\currentmathfence{#1}%
\startusemathstyleparameter\mathfenceparameter
\let\fence\math_fenced_middle
\edef\p_math_fenced_color{\mathfenceparameter\c!color}%
\ifx\p_math_fenced_color\empty
- \expandafter\math_fenced_normal
+ \let\math_fenced_color_push\donothing
+ \let\math_fenced_color_pop \donothing
\else
- \expandafter\math_fenced_colored
- \fi}
+ \let\math_fenced_color_push\math_fenced_color_do_push
+ \let\math_fenced_color_pop \math_fenced_color_do_pop
+ \fi
+ \math_fenced_left}
-\def\math_fenced_normal#1%
- {\math_fenced_left
- #1%
+\unexpanded\def\math_fenced_fenced_stop#1%
+ {\edef\currentmathfence{#1}%
\math_fenced_right
\stopusemathstyleparameter
- \endgroup}
+ \endgroup
+ \advance\c_math_fenced_nesting\minusone}
-\def\math_fenced_colored#1%
- {\pushcolor[\p_math_fenced_color]%
- \math_fenced_left
- \popcolor
- #1%
- \pushcolor[\p_math_fenced_color]%
+\unexpanded\def\math_fenced_fenced[#1]#2%
+ {\math_fenced_fenced_start{#1}%
+ #2%
\math_fenced_right
- \popcolor
\stopusemathstyleparameter
\endgroup}
@@ -96,13 +114,354 @@
\let\fenced\math_fenced_fenced
\to \everymathematics
-\definemathfence [parenthesis] [\c!left=0x28,\c!right=0x29]
-\definemathfence [bracket] [\c!left=0x5B,\c!right=0x5D]
-\definemathfence [braces] [\c!left=0x7B,\c!right=0x7D]
-\definemathfence [bar] [\c!left=0x7C,\c!right=0x7C]
-\definemathfence [doublebar] [\c!left=0x2016,\c!right=0x2016]
-\definemathfence [angle] [\c!left=0x3C,\c!right=0x3E]
+% todo: | in mathmode letter
+%
+% \appendtoks
+% \let\bar\letterbar
+% \to \everymathematics
+%
+% but then we don't have it in embedded text either, so ...
+
+\definemathfence [parenthesis] [\c!left="0028,\c!right="0029]
+\definemathfence [bracket] [\c!left="005B,\c!right="005D]
+\definemathfence [brace] [\c!left="007B,\c!right="007D]
+\definemathfence [bar] [\c!left="007C,\c!right="007C]
+\definemathfence [doublebar] [\c!left="2016,\c!right="2016]
+\definemathfence [triplebar] [\c!left="2980,\c!right="2980]
+\definemathfence [angle] [\c!left="27E8,\c!right="27E9]
+\definemathfence [doubleangle] [\c!left="27EA,\c!right="27EB]
+\definemathfence [solidus] [\c!left="2044,\c!right="2044]
+\definemathfence [nothing]
+
+\definemathfence [mirrored] % \v!mirrored
+
+\definemathfence [mirroredparenthesis] [mirrored] [\c!right="0028,\c!left="0029]
+\definemathfence [mirroredbracket] [mirrored] [\c!right="005B,\c!left="005D]
+\definemathfence [mirroredbrace] [mirrored] [\c!right="007B,\c!left="007D]
+\definemathfence [mirroredbar] [mirrored] [\c!right="007C,\c!left="007C]
+\definemathfence [mirroreddoublebar] [mirrored] [\c!right="2016,\c!left="2016]
+\definemathfence [mirroredtriplebar] [mirrored] [\c!right="2980,\c!left="2980]
+\definemathfence [mirroredangle] [mirrored] [\c!right="27E8,\c!left="27E9]
+\definemathfence [mirroreddoubleangle] [mirrored] [\c!right="27EA,\c!left="27EB]
+\definemathfence [mirroredsolidus] [mirrored] [\c!right="2044,\c!left="2044]
+\definemathfence [mirrorednothing] [mirrored]
+
+%D A bonus:
+
+\unexpanded\def\Lparent {\math_fenced_fenced_start{parenthesis}} \unexpanded\def\Rparent {\math_fenced_fenced_stop{parenthesis}}
+\unexpanded\def\Lbracket {\math_fenced_fenced_start{bracket}} \unexpanded\def\Rbracket {\math_fenced_fenced_stop{bracket}}
+\unexpanded\def\Lbrace {\math_fenced_fenced_start{brace}} \unexpanded\def\Rbrace {\math_fenced_fenced_stop{brace}}
+\unexpanded\def\Langle {\math_fenced_fenced_start{angle}} \unexpanded\def\Rangle {\math_fenced_fenced_stop{angle}}
+\unexpanded\def\Ldoubleangle {\math_fenced_fenced_start{doubleangle}} \unexpanded\def\Rdoubleangle {\math_fenced_fenced_stop{doubleangle}}
+\unexpanded\def\Lbar {\math_fenced_fenced_start{bar}} \unexpanded\def\Rbar {\math_fenced_fenced_stop{bar}}
+\unexpanded\def\Ldoublebar {\math_fenced_fenced_start{doublebar}} \unexpanded\def\Rdoublebar {\math_fenced_fenced_stop{doublebar}}
+\unexpanded\def\Ltriplebar {\math_fenced_fenced_start{triplebar}} \unexpanded\def\Rtriplebar {\math_fenced_fenced_stop{triplebar}}
+\unexpanded\def\Lsolidus {\math_fenced_fenced_start{solidus}} \unexpanded\def\Rsolidus {\math_fenced_fenced_stop{solidus}}
+\unexpanded\def\Lnothing {\math_fenced_fenced_start{nothing}} \unexpanded\def\Rnothing {\math_fenced_fenced_stop{nothing}}
+
+\unexpanded\def\Lparentmirrored {\math_fenced_fenced_stop{mirroredparenthesis}} \unexpanded\def\Rparentmirrored {\math_fenced_fenced_start{mirroredparenthesis}}
+\unexpanded\def\Lbracketmirrored {\math_fenced_fenced_stop{mirroredbracket}} \unexpanded\def\Rbracketmirrored {\math_fenced_fenced_start{mirroredbracket}}
+\unexpanded\def\Lbracemirrored {\math_fenced_fenced_stop{mirroredbrace}} \unexpanded\def\Rbracemirrored {\math_fenced_fenced_start{mirroredbrace}}
+\unexpanded\def\Langlemirrored {\math_fenced_fenced_stop{mirroredangle}} \unexpanded\def\Ranglemirrored {\math_fenced_fenced_start{mirroredangle}}
+\unexpanded\def\Ldoubleanglemirrored {\math_fenced_fenced_stop{mirroreddoubleangle}} \unexpanded\def\Rdoubleanglemirrored {\math_fenced_fenced_start{mirroreddoubleangle}}
+\unexpanded\def\Lbarmirrored {\math_fenced_fenced_stop{mirroredbar}} \unexpanded\def\Rbarmirrored {\math_fenced_fenced_start{mirroredbar}}
+\unexpanded\def\Ldoublebarmirrored {\math_fenced_fenced_stop{mirroreddoublebar}} \unexpanded\def\Rdoublebarmirrored {\math_fenced_fenced_start{mirroreddoublebar}}
+\unexpanded\def\Ltriplebarmirrored {\math_fenced_fenced_stop{mirroredtriplebar}} \unexpanded\def\Rtriplebarmirrored {\math_fenced_fenced_start{mirroredtriplebar}}
+\unexpanded\def\Lsolidusmirrored {\math_fenced_fenced_stop{mirroredsolidus}} \unexpanded\def\Rsolidusmirrored {\math_fenced_fenced_start{mirroredsolidus}}
+\unexpanded\def\Lnothingmirrored {\math_fenced_fenced_stop{mirrorednothing}} \unexpanded\def\Rnothingmirrored {\math_fenced_fenced_start{mirrorednothing}}
+
+%D And another one:
+
+% \setupmathfences[color=darkgreen]
+%
+% \startformula
+% \left{ \frac{1}{a} \right}
+% \left[ \frac{1}{b} \right]
+% \left( \frac{1}{c} \right)
+% \left< \frac{1}{d} \right>
+% \left| \frac{1}{e} \right|
+% \stopformula
+
+\installcorenamespace{mathleft}
+\installcorenamespace{mathright}
+\installcorenamespace{mathmiddle}
+
+\unexpanded\def\left {\afterassignment\math_left \let\nexttoken}
+\unexpanded\def\right {\afterassignment\math_right \let\nexttoken}
+\unexpanded\def\middle{\afterassignment\math_middle\let\nexttoken}
+
+\newconditional\c_math_fenced_done
+\newconditional\c_math_fenced_unknown \settrue\c_math_fenced_unknown
+
+\def\math_left
+ {\settrue\c_math_fenced_done
+ \edef\m_math_left{\meaning\nexttoken}%
+ \csname\??mathleft\ifcsname\??mathleft\m_math_left\endcsname\m_math_left\else\s!unknown\fi\endcsname}
+
+\def\math_right
+ {\settrue\c_math_fenced_done
+ \edef\m_math_right{\meaning\nexttoken}%
+ \csname\??mathright\ifcsname\??mathright\m_math_right\endcsname\m_math_right\else\s!unknown\fi\endcsname}
+
+\def\math_middle
+ {\settrue\c_math_fenced_done
+ \edef\m_math_middle{\meaning\nexttoken}%
+ \csname\??mathmiddle\ifcsname\??mathmiddle\m_math_middle\endcsname\m_math_middle\else\s!unknown\fi\endcsname}
+
+\setvalue{\??mathleft \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalleft \nexttoken\fi}
+\setvalue{\??mathright \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalright \nexttoken\fi}
+\setvalue{\??mathmiddle\s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalmiddle\nexttoken\fi}
+
+\unexpanded\def\installmathfencepair#1#2#3#4%
+ {\expandafter\let\csname\??mathleft \meaning#1\endcsname#2%
+ \expandafter\let\csname\??mathright\meaning#3\endcsname#4}
+
+\expandafter\let\csname\??mathleft \meaning [\endcsname\Lbracket
+\expandafter\let\csname\??mathleft \meaning (\endcsname\Lparent
+\expandafter\let\csname\??mathleft \meaning <\endcsname\Langle
+\expandafter\let\csname\??mathleft \meaning ⟨\endcsname\Langle
+\expandafter\let\csname\??mathleft \meaning ⟪\endcsname\Ldoubleangle
+\expandafter\let\csname\??mathleft \meaning {\endcsname\Lbrace
+\expandafter\let\csname\??mathleft \meaning |\endcsname\Lbar
+\expandafter\let\csname\??mathleft \meaning ‖\endcsname\Ldoublebar
+\expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Ltriplebar
+\expandafter\let\csname\??mathleft \meaning /\endcsname\Lsolidus
+\expandafter\let\csname\??mathleft \meaning .\endcsname\Lnothing
+
+\expandafter\let\csname\??mathright\meaning ]\endcsname\Rbracket
+\expandafter\let\csname\??mathright\meaning )\endcsname\Rparent
+\expandafter\let\csname\??mathright\meaning >\endcsname\Rangle
+\expandafter\let\csname\??mathright\meaning ⟩\endcsname\Rangle
+\expandafter\let\csname\??mathright\meaning ⟫\endcsname\Rdoubleangle
+\expandafter\let\csname\??mathright\meaning }\endcsname\Rbrace
+\expandafter\let\csname\??mathright\meaning |\endcsname\Rbar
+\expandafter\let\csname\??mathright\meaning ‖\endcsname\Rdoublebar
+\expandafter\let\csname\??mathright\meaning ⦀\endcsname\Rtriplebar
+\expandafter\let\csname\??mathright\meaning /\endcsname\Rsolidus
+\expandafter\let\csname\??mathright\meaning .\endcsname\Rnothing
+
+\expandafter\let\csname\??mathright\meaning [\endcsname\Lbracketmirrored
+\expandafter\let\csname\??mathright\meaning (\endcsname\Lparentmirrored
+\expandafter\let\csname\??mathright\meaning <\endcsname\Langlemirrored
+\expandafter\let\csname\??mathright\meaning ⟨\endcsname\Langlemirrored
+\expandafter\let\csname\??mathright\meaning ⟪\endcsname\Ldoubleanglemirrored
+\expandafter\let\csname\??mathright\meaning {\endcsname\Lbracemirrored
+%expandafter\let\csname\??mathright\meaning |\endcsname\Lbarmirrored
+%expandafter\let\csname\??mathright\meaning ‖\endcsname\Ldoublebarmirrored
+%expandafter\let\csname\??mathright\meaning ⦀\endcsname\Ltriplebarmirrored
+\expandafter\let\csname\??mathright\meaning /\endcsname\Lsolidusmirrored
+\expandafter\let\csname\??mathright\meaning .\endcsname\Lnothingmirrored
+
+\expandafter\let\csname\??mathleft \meaning ]\endcsname\Rbracketmirrored
+\expandafter\let\csname\??mathleft \meaning )\endcsname\Rparentmirrored
+\expandafter\let\csname\??mathleft \meaning >\endcsname\Ranglemirrored
+\expandafter\let\csname\??mathleft \meaning ⟩\endcsname\Ranglemirrored
+\expandafter\let\csname\??mathleft \meaning ⟫\endcsname\Rdoubleanglemirrored
+\expandafter\let\csname\??mathleft \meaning }\endcsname\Rbracemirrored
+%expandafter\let\csname\??mathleft \meaning |\endcsname\Rbarmirrored
+%expandafter\let\csname\??mathleft \meaning ‖\endcsname\Rdoublebarmirrored
+%expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Rtriplebarmirrored
+\expandafter\let\csname\??mathleft \meaning /\endcsname\Rsolidusmirrored
+\expandafter\let\csname\??mathleft \meaning .\endcsname\Rnothingmirrored
+
+% todo paren parent
+
+\let\lbrack\lbracket
+\let\rbrack\rbracket
+
+\installmathfencepair \lbrace \Lbrace \rbrace \Rbrace
+\installmathfencepair \lbracket \Lbracket \rbracket \Rbracket
+\installmathfencepair \lparen \Lparen \rparen \Rparen
+\installmathfencepair \lparent \Lparent \rparent \Rparent
+\installmathfencepair \langle \Langle \rangle \Rangle
+%installmathfencepair \lrangle \Ldoubleangle \rrangle \Rdoubleangle
+%installmathfencepair \lbar \Lbar \rbar \Rbar
+\installmathfencepair \vert \Lbar \vert \Rbar
+\installmathfencepair \solidus \Lsolidus \solidus \Rsolidus
+
+\unexpanded\def\{{\mathortext\lbrace \letterleftbrace } % or maybe a chardef
+\unexpanded\def\}{\mathortext\rbrace \letterrightbrace } % or maybe a chardef
+\unexpanded\def\[{\mathortext\lbracket\letterleftbracket } % or maybe a chardef
+\unexpanded\def\]{\mathortext\rbracket\letterrightbracket} % or maybe a chardef
+\unexpanded\def\({\mathortext\lparent \letterleftparent } % or maybe a chardef
+\unexpanded\def\){\mathortext\rparent \letterrightparent } % or maybe a chardef
+\unexpanded\def\|{\mathortext\vert \letterbar } % or maybe a chardef
+%unexpanded\def\/{\mathortext\solidus \letterslash } % or maybe a chardef
+
+\installmathfencepair \{ \Lbrace \} \Rbrace
+\installmathfencepair \[ \Lbracket \] \Rbracket
+\installmathfencepair \( \Lparent \) \Rparent
+\installmathfencepair \< \Langle \> \Rangle
+\installmathfencepair \| \Lbar \| \Rbar
+
+%D As we have overloaded \type {\left} and \type {\right} we also need a more
+%D clever version of the following:
+
+% methods:
+%
+% 1: none
+% 2: lua
+% 3: tex
+
+% variants:
+%
+% 1: step 1
+% 2: step 2
+% 3: htdp * 1.33^n
+% 4: size * 1.33^n
+
+\setnewconstant\bigmathdelimitermethod \plusone
+\setnewconstant\bigmathdelimitervariant\plusthree
+
+\unexpanded\def\plainbigdelimiters % traditional method
+ {\bigmathdelimitermethod\plustwo}
+
+\plainbigdelimiters % is default for the moment but not so nice
+
+% \setconstant\bigmathdelimitermethod\plusone
+
+\installcorenamespace{mathbig}
+
+\unexpanded\def\choosemathbig#1#2% so we accept \big{||} as well
+ {{\hbox\bgroup
+ \startimath
+ \ifcase\bigmathdelimitermethod
+ \math_fenced_step#2\relax
+ \or
+ \attribute\mathsizeattribute\numexpr\bigmathdelimitervariant*\plushundred+#1\relax
+ \math_fenced_step#2\relax
+ \else
+ \math_fenced_step#2{\vbox to\getvalue{\??mathbig\number#1}\bodyfontsize{}}%
+ \fi
+ \nulldelimiterspace\zeropoint\relax
+ \mathsurround\zeropoint
+ \stopimath
+ \egroup}}
+
+\def\math_fenced_step#1#2%
+ {\setfalse\c_math_fenced_unknown
+ \setfalse\c_math_fenced_done
+ \left#1\relax
+ \ifconditional\c_math_fenced_done
+ #2%
+ \right.\relax
+ \else
+ \left.\relax
+ #2%
+ \setfalse\c_math_fenced_done
+ \right#1\relax
+ \ifconditional\c_math_fenced_done
+ \else
+ \right.\relax
+ \fi
+ \fi}
+
+\unexpanded\def\mathdelimiterstep#1#2%
+ {\begingroup
+ \attribute\mathsizeattribute\numexpr\plushundred+#1\relax
+ \math_fenced_step#2\relax
+ \endgroup}
+
+\setvalue{\??mathbig1}{0.85}
+\setvalue{\??mathbig2}{1.15}
+\setvalue{\??mathbig3}{1.45}
+\setvalue{\??mathbig4}{1.75}
+
+\definemathcommand [big] {\choosemathbig\plusone }
+\definemathcommand [Big] {\choosemathbig\plustwo }
+\definemathcommand [bigg] {\choosemathbig\plusthree}
+\definemathcommand [Bigg] {\choosemathbig\plusfour }
+
+\definemathcommand [bigl] [open] [one] {\big}
+\definemathcommand [bigm] [rel] [one] {\big}
+\definemathcommand [bigr] [close] [one] {\big}
+\definemathcommand [Bigl] [open] [one] {\Big}
+\definemathcommand [Bigm] [rel] [one] {\Big}
+\definemathcommand [Bigr] [close] [one] {\Big}
+\definemathcommand [biggl] [open] [one] {\bigg}
+\definemathcommand [biggm] [rel] [one] {\bigg}
+\definemathcommand [biggr] [close] [one] {\bigg}
+\definemathcommand [Biggl] [open] [one] {\Bigg}
+\definemathcommand [Biggm] [rel] [one] {\Bigg}
+\definemathcommand [Biggr] [close] [one] {\Bigg}
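The \choosemathbig request above is handed to the Lua end as a single attribute value, the variant times one hundred plus the step (1-4 for \big up to \Bigg); the resize handler in math-noa.lua further down splits it again with div and mod. A minimal standalone Lua sketch of that packing, for illustration only (helper names are made up):

local floor = math.floor

-- pack a big-delimiter request the way \choosemathbig does with \numexpr:
-- variant * 100 + step, where step 1..4 corresponds to \big .. \Bigg
local function packsize(variant,step)
    return variant * 100 + step
end

-- unpack it the way the resize handler does with div and mod
local function unpacksize(a)
    return floor(a/100), a % 100
end

local a = packsize(3,2)        -- variant 3 (htdp * 1.33^n) requested by \Big
print(a)                       -- 302
print(unpacksize(a))           -- 3  2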
%definemathfence [fancybracket] [bracket] [command=yes,color=red]
+% experimental accents:
+%
+% \definemathoverextensible [top] [hoed] ["FE302]
+% \definemathoverextensible [top] [slang] ["FE303]
+
+%D This is needed for mathml (used in mrow, so it gets reset):
+
+\let\math_fences_saved_left \left
+\let\math_fences_saved_middle\middle
+\let\math_fences_saved_right \right
+
+% \def\math_fences_traced#1{\ruledhbox{\ttx#1\low{\the\c_math_fenced_nesting}}}
+
+\unexpanded\def\math_fences_checked_left
+ {%\math_fences_traced L%
+ \math_fences_saved_left}
+
+\unexpanded\def\math_fences_checked_middle
+ {%\math_fences_traced M%
+ \ifcase\c_math_fenced_nesting
+ \expandafter\math_fences_saved_middle
+ \else
+ \expandafter\firstofoneargument
+ \fi}
+
+\unexpanded\def\math_fences_checked_right
+ {%\math_fences_traced R%
+ \ifcase\c_math_fenced_nesting
+ \expandafter\firstofoneargument
+ \else
+ \expandafter\math_fences_saved_right
+ \fi}
+
+\newconditional\c_math_checked_done % only bars
+
+\unexpanded\def\math_fences_checked_left_or_right
+ {%\math_fences_traced B%
+ \ifcase\c_math_fenced_nesting
+ \settrue\c_math_checked_done
+ \expandafter\math_fences_saved_left
+ \else\ifconditional\c_math_checked_done
+ \setfalse\c_math_checked_done
+ \doubleexpandafter\math_fences_saved_right
+ \else
+ \doubleexpandafter\math_fences_saved_middle
+ \fi\fi}
+
+\unexpanded\def\math_fences_checked_start
+ {\c_math_fenced_nesting\zerocount}
+
+\unexpanded\def\math_fences_checked_stop
+ {\ifcase\c_math_fenced_nesting\else
+ \right.\relax % error, todo: nil spacing
+ \expandafter\math_fences_checked_stop
+ \fi}
+
+\unexpanded\def\startcheckedfences
+ {\begingroup
+ \let\left \math_fences_checked_left
+ \let\middle\math_fences_checked_middle
+ \let\right \math_fences_checked_right
+ \math_fences_checked_start}
+
+\unexpanded\def\stopcheckedfences
+ {\math_fences_checked_stop
+ \endgroup}
+
+\let\leftorright\math_fences_checked_left_or_right % for bars
+
+%D The next characters were used for constructing nicer extensibles but
+%D nowadays we have real characters.
+
+\Umathchardef\braceld=0 \defaultmathfamily "FF07A
+\Umathchardef\bracerd=0 \defaultmathfamily "FF07B
+\Umathchardef\bracelu=0 \defaultmathfamily "FF07C
+\Umathchardef\braceru=0 \defaultmathfamily "FF07D
+
\protect
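The overloaded \left, \middle and \right above resolve their delimiter by looking up the \meaning of the next token in a namespace and falling back to the unknown handler (and thus the plain primitive) when nothing was registered with \installmathfencepair. The following plain-Lua snippet is only a rough analogue of that lookup-with-fallback pattern, not code from the patch:

-- toy dispatch table keyed on a token's meaning, with an unknown fallback,
-- mirroring the \??mathleft<meaning> / \s!unknown resolution above
local leftfences = {
    ["("] = "Lparent",
    ["["] = "Lbracket",
    ["."] = "Lnothing",
}

local function mathleft(meaning)
    local found = leftfences[meaning]
    if found then
        return found                       -- a registered fence command
    else
        return "normalleft " .. meaning    -- unknown: use the primitive as-is
    end
end

print(mathleft("("))   -- Lparent
print(mathleft("?"))   -- normalleft ?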
diff --git a/Master/texmf-dist/tex/context/base/math-frc.mkiv b/Master/texmf-dist/tex/context/base/math-frc.mkiv
index e11e79e8230..f4f3f2b84a1 100644
--- a/Master/texmf-dist/tex/context/base/math-frc.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-frc.mkiv
@@ -102,6 +102,7 @@
\setupmathfractions
[\c!mathstyle=,
\c!alternative=\v!inner,
+ \c!margin=\zeropoint,
\c!rulethickness=.25\exheight,
\c!left=0x2E,
\c!right=0x2E,
@@ -111,9 +112,12 @@
\setuevalue{\currentmathfraction}{\math_frac{\currentmathfraction}}%
\to \everydefinemathfraction
+\newdimen\d_math_fraction_margin
+
\unexpanded\def\math_frac#1%
{\begingroup
\edef\currentmathfraction{#1}%
+ \d_math_fraction_margin\mathfractionparameter\c!margin
\edef\p_math_fractions_color{\mathfractionparameter\c!color}%
\ifx\p_math_fractions_color\empty
\expandafter\math_frac_normal
@@ -139,19 +143,58 @@
\number\dimexpr\mathfractionparameter\c!rulethickness%
)}}
-\setvalue{\??mathfractionalternative\v!inner}#1#2%
+% Having a \withmarginornot{#1}{#2} does not make much sense, nor do
+% 4 tests or 4 redundant kerns (longer node lists plus possible
+% interference). A split in normal and margin also makes testing
+% easier. When left and right margins are needed we might merge
+% the variants again. After all, these are not real installers.
+
+\setvalue{\??mathfractionalternative\v!inner}%
+ {\ifcase\d_math_fraction_margin
+ \expandafter\math_fraction_inner_normal
+ \else
+ \expandafter\math_fraction_inner_margin
+ \fi}
+
+\def\math_fraction_inner_normal#1#2%
{\Ustack{%
{\usemathstyleparameter\mathfractionparameter{#1}}% we should store this one
\math_frac_command
{\usemathstyleparameter\mathfractionparameter{#2}}% and reuse it here
}\endgroup}
-\setvalue{\??mathfractionalternative\v!outer}#1#2%
+\def\math_fraction_inner_margin#1#2%
+ {\Ustack{%
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter{#1}% we should store this one
+ \kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter{#2}% and reuse it here
+ \kern\d_math_fraction_margin}%
+ }\endgroup}
+
+\setvalue{\??mathfractionalternative\v!outer}%
+ {\ifcase\d_math_fraction_margin
+ \expandafter\math_fraction_outer_normal
+ \else
+ \expandafter\math_fraction_outer_margin
+ \fi}
+
+\def\math_fraction_outer_normal#1#2%
{\Ustack{%
\usemathstyleparameter\mathfractionparameter
{{#1}\math_frac_command{#2}}%
}\endgroup}
+\def\math_fraction_outer_margin#1#2%
+ {\Ustack{%
+ \usemathstyleparameter\mathfractionparameter
+ {{\kern\d_math_fraction_margin#1\kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin#2\kern\d_math_fraction_margin}}%
+ }\endgroup}
+
\definemathfraction[frac][\c!mathstyle=]
\unexpanded\def\xfrac {\begingroup\let\xfrac\xxfrac\math_frac_alternative\scriptstyle}
@@ -231,7 +274,7 @@
%D \getbuffer
\unexpanded\def\cfrac
- {\doifnextoptionalelse\math_cfrac_yes\math_cfrac_nop}
+ {\doifnextoptionalcselse\math_cfrac_yes\math_cfrac_nop}
\def\math_cfrac_nop {\math_cfrac_indeed[cc]}
\def\math_cfrac_yes[#1]{\math_cfrac_indeed[#1cc]}
diff --git a/Master/texmf-dist/tex/context/base/math-ini.lua b/Master/texmf-dist/tex/context/base/math-ini.lua
index 7ba1d451463..9772ce5389d 100644
--- a/Master/texmf-dist/tex/context/base/math-ini.lua
+++ b/Master/texmf-dist/tex/context/base/math-ini.lua
@@ -16,19 +16,18 @@ if not modules then modules = { } end modules ['math-ini'] = {
-- then we also have to set the other characters (only a subset done now)
local formatters, find = string.formatters, string.find
-local utfchar, utfbyte = utf.char, utf.byte
-local setmathcode, setdelcode = tex.setmathcode, tex.setdelcode
-local settexattribute = tex.setattribute
+local utfchar, utfbyte, utflength = utf.char, utf.byte, utf.length
local floor = math.floor
-local context = context
+local context = context
+local commands = commands
-local contextsprint = context.sprint
-local contextfprint = context.fprint -- a bit inefficient
+local context_sprint = context.sprint
+----- context_fprint = context.fprint -- a bit inefficient
-local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
+local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
-local report_math = logs.reporter("mathematics","initializing")
+local report_math = logs.reporter("mathematics","initializing")
mathematics = mathematics or { }
local mathematics = mathematics
@@ -40,6 +39,10 @@ local unsetvalue = attributes.unsetvalue
local allocate = utilities.storage.allocate
local chardata = characters.data
+local texsetattribute = tex.setattribute
+local setmathcode = tex.setmathcode
+local setdelcode = tex.setdelcode
+
local families = allocate {
mr = 0,
mb = 1,
@@ -87,6 +90,7 @@ local classes = allocate {
large = 1, -- op
variable = 7, -- alphabetic
number = 7, -- alphabetic
+ root = 16, -- a private one
}
local open_class = 4
@@ -151,6 +155,10 @@ local function radical(family,slot)
return formatters['\\Uradical "%X "%X '](family,slot)
end
+local function root(family,slot)
+ return formatters['\\Uroot "%X "%X '](family,slot)
+end
+
local function mathchardef(name,class,family,slot)
return formatters['\\Umathchardef\\%s "%X "%X "%X '](name,class,family,slot)
end
@@ -191,29 +199,42 @@ local setmathcharacter = function(class,family,slot,unicode,mset,dset)
return mset, dset
end
+local f_accent = formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ]
+local f_topaccent = formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ]
+local f_botaccent = formatters[ [[\ugdef\%s{\Umathbotaccent 0 "%X "%X }]] ]
+local f_over = formatters[ [[\ugdef\%s{\Udelimiterover "%X "%X }]] ]
+local f_under = formatters[ [[\ugdef\%s{\Udelimiterunder "%X "%X }]] ]
+local f_fence = formatters[ [[\ugdef\%s{\Udelimiter "%X "%X "%X }]] ]
+local f_delimiter = formatters[ [[\ugdef\%s{\Udelimiter 0 "%X "%X }]] ]
+local f_radical = formatters[ [[\ugdef\%s{\Uradical "%X "%X }]] ]
+local f_root = formatters[ [[\ugdef\%s{\Uroot "%X "%X }]] ]
+----- f_char = formatters[ [[\ugdef\%s{\Umathchar "%X "%X "%X }]]
+local f_char = formatters[ [[\Umathchardef\%s "%X "%X "%X ]] ]
+
local setmathsymbol = function(name,class,family,slot) -- hex is nicer for tracing
if class == classes.accent then
- contextsprint(formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ](name,family,slot))
+ context_sprint(f_accent(name,family,slot))
elseif class == classes.topaccent then
- contextsprint(formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ](name,family,slot))
+ context_sprint(f_topaccent(name,family,slot))
elseif class == classes.botaccent then
- contextsprint(formatters[ [[\ugdef\%s{\Umathbotaccent 0 "%X "%X }]] ](name,family,slot))
+ context_sprint(f_botaccent(name,family,slot))
elseif class == classes.over then
- contextsprint(formatters[ [[\ugdef\%s{\Udelimiterover "%X "%X }]] ](name,family,slot))
+ context_sprint(f_over(name,family,slot))
elseif class == classes.under then
- contextsprint(formatters[ [[\ugdef\%s{\Udelimiterunder "%X "%X }]] ](name,family,slot))
+ context_sprint(f_under(name,family,slot))
elseif class == open_class or class == close_class or class == middle_class then
setdelcode("global",slot,{family,slot,0,0})
- contextsprint(formatters[ [[\ugdef\%s{\Udelimiter "%X "%X "%X }]] ](name,class,family,slot))
+ context_sprint(f_fence(name,class,family,slot))
elseif class == classes.delimiter then
setdelcode("global",slot,{family,slot,0,0})
- contextsprint(formatters[ [[\ugdef\%s{\Udelimiter 0 "%X "%X }]] ](name,family,slot))
+ context_sprint(f_delimiter(name,family,slot))
elseif class == classes.radical then
- contextsprint(formatters[ [[\ugdef\%s{\Uradical "%X "%X }]] ](name,family,slot))
+ context_sprint(f_radical(name,family,slot))
+ elseif class == classes.root then
+ context_sprint(f_root(name,family,slot))
else
-- beware, open/close and other specials should not end up here
- -- contextsprint(formatters[ [[\ugdef\%s{\Umathchar "%X "%X "%X }]],name,class,family,slot))
- contextsprint(formatters[ [[\Umathchardef\%s "%X "%X "%X ]] ](name,class,family,slot))
+ context_sprint(f_char(name,class,family,slot))
end
end
@@ -313,30 +334,57 @@ function mathematics.define(family)
end
-- needed for mathml analysis
-
+-- strings with # > 1 are invalid
-- we could cache
+local lpegmatch = lpeg.match
+
+local utf8byte = lpeg.patterns.utf8byte * lpeg.P(-1)
+
+-- function somechar(c)
+-- local b = lpegmatch(utf8byte,c)
+-- return b and chardata[b]
+-- end
+
+
+local somechar = { }
+
+table.setmetatableindex(somechar,function(t,k)
+ if k then
+ local b = lpegmatch(utf8byte,k)
+ local v = b and chardata[b] or false
+ t[k] = v
+ return v
+ end
+end)
+
local function utfmathclass(chr, default)
- local cd = chardata[utfbyte(chr)]
+ local cd = somechar[chr]
return cd and cd.mathclass or default or "unknown"
end
-local function utfmathaccent(chr,default,asked)
- local cd = chardata[utfbyte(chr)]
+local function utfmathaccent(chr,default,asked1,asked2)
+ local cd = somechar[chr]
if not cd then
return default or false
end
- if asked then
+ if asked1 and asked1 ~= "" then
local mc = cd.mathclass
- if mc and mc == asked then
+ if mc and (mc == asked1 or mc == asked2) then
return true
end
local ms = cd.mathspec
+ if not ms then
+ local mp = cd.mathparent
+ if mp then
+ ms = chardata[mp].mathspec
+ end
+ end
if ms then
for i=1,#ms do
local msi = ms[i]
local mc = msi.class
- if mc and mc == asked then
+ if mc and (mc == asked1 or mc == asked2) then
return true
end
end
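The somechar table introduced above turns repeated character-data lookups into a lazily filled cache, storing false for strings that are not a single valid UTF character so that misses are cached as well. A plain-Lua sketch of the same idiom, using the Lua 5.3 utf8 library instead of the lpeg pattern and a toy chardata table:

local chardata = { [0x2032] = { mathclass = "prime" } }   -- toy stand-in

local somechar = setmetatable({ }, {
    __index = function(t,k)
        local v = false
        if utf8.len(k) == 1 then              -- exactly one character
            v = chardata[utf8.codepoint(k)] or false
        end
        rawset(t,k,v)                         -- cache hits and misses alike
        return v
    end,
})

local c = somechar["′"]
print(c and c.mathclass)                      -- prime (first access fills the cache)
print(somechar["xx"])                         -- false (cached miss)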
@@ -360,32 +408,38 @@ local function utfmathaccent(chr,default,asked)
return default or false
end
-local function utfmathstretch(chr, default) -- "h", "v", "b", ""
- local cd = chardata[utfbyte(chr)]
+local function utfmathstretch(chr,default) -- "h", "v", "b", ""
+ local cd = somechar[chr]
return cd and cd.mathstretch or default or ""
end
-local function utfmathcommand(chr,default,asked)
--- local cd = chardata[utfbyte(chr)]
--- local cmd = cd and cd.mathname
--- return cmd or default or ""
- local cd = chardata[utfbyte(chr)]
+local function utfmathcommand(chr,default,asked1,asked2)
+ local cd = somechar[chr]
if not cd then
return default or ""
end
- if asked then
+ if asked1 then
local mn = cd.mathname
local mc = cd.mathclass
- if mn and mc and mc == asked then
+ if mn and mc and (mc == asked1 or mc == asked2) then
return mn
end
local ms = cd.mathspec
+ if not ms then
+ local mp = cd.mathparent
+ if mp then
+ ms = chardata[mp].mathspec
+ end
+ end
if ms then
for i=1,#ms do
local msi = ms[i]
local mn = msi.name
- if mn and msi.class == asked then
- return mn
+ if mn then
+ local mc = msi.class
+ if mc == asked1 or mc == asked2 then
+ return mn
+ end
end
end
end
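The reworked utfmathcommand (and utfmathaccent) above accept two candidate classes and, when a character has no mathspec of its own, fall back to the mathspec of its mathparent. A condensed sketch of that lookup order over a toy character table (the codepoints and names below are made up):

local chardata = {
    [0x000001] = { mathparent = 0x000002 },                                    -- toy: no spec, defer to parent
    [0x000002] = { mathspec = { { class = "under", name = "underbrace" } } },  -- toy parent entry
}

local function utfmathcommand(cd,default,asked1,asked2)
    if cd.mathname and (cd.mathclass == asked1 or cd.mathclass == asked2) then
        return cd.mathname                        -- 1: own name with a matching class
    end
    local ms = cd.mathspec
    if not ms and cd.mathparent then
        ms = chardata[cd.mathparent].mathspec     -- 3: inherit the parent's spec
    end
    if ms then
        for i=1,#ms do                            -- 2: scan the (possibly inherited) spec
            local msi = ms[i]
            if msi.name and (msi.class == asked1 or msi.class == asked2) then
                return msi.name
            end
        end
    end
    return default or ""
end

print(utfmathcommand(chardata[0x000001],nil,"botaccent","under"))   -- underbrace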
@@ -409,7 +463,7 @@ local function utfmathcommand(chr,default,asked)
end
local function utfmathfiller(chr, default)
- local cd = chardata[utfbyte(chr)]
+ local cd = somechar[chr]
local cmd = cd and (cd.mathfiller or cd.mathname)
return cmd or default or ""
end
@@ -418,6 +472,7 @@ mathematics.utfmathclass = utfmathclass
mathematics.utfmathstretch = utfmathstretch
mathematics.utfmathcommand = utfmathcommand
mathematics.utfmathfiller = utfmathfiller
+mathematics.utfmathaccent = utfmathaccent
-- interfaced
@@ -430,17 +485,31 @@ function commands.doifelseutfmathaccent(chr,asked)
commands.doifelse(utfmathaccent(chr,nil,asked))
end
+function commands.utfmathcommandabove(asked) context(utfmathcommand(asked,nil,"topaccent","over" )) end
+function commands.utfmathcommandbelow(asked) context(utfmathcommand(asked,nil,"botaccent","under")) end
+
+function commands.doifelseutfmathabove(chr) commands.doifelse(utfmathaccent(chr,nil,"topaccent","over" )) end
+function commands.doifelseutfmathbelow(chr) commands.doifelse(utfmathaccent(chr,nil,"botaccent","under")) end
+
-- helpers
+--
+-- 1: step 1
+-- 2: step 2
+-- 3: htdp * 1.33^n
+-- 4: size * 1.33^n
-function mathematics.big(tfmdata,unicode,n)
+function mathematics.big(tfmdata,unicode,n,method)
local t = tfmdata.characters
local c = t[unicode]
- if c then
+ if c and n > 0 then
local vv = c.vert_variants or c.next and t[c.next].vert_variants
if vv then
local vvn = vv[n]
return vvn and vvn.glyph or vv[#vv].glyph or unicode
- else
+ elseif method == 1 or method == 2 then
+ if method == 2 then -- large steps
+ n = n * 2
+ end
local next = c.next
while next do
if n <= 1 then
@@ -455,6 +524,27 @@ function mathematics.big(tfmdata,unicode,n)
end
end
end
+ else
+ local size = 1.33^n
+ if method == 4 then
+ size = tfmdata.parameters.size * size
+ else -- if method == 3 then
+ size = (c.height + c.depth) * size
+ end
+ local next = c.next
+ while next do
+ local cn = t[next]
+ if (cn.height + cn.depth) >= size then
+ return next
+ else
+ local tn = cn.next
+ if tn then
+ next = tn
+ else
+ return next
+ end
+ end
+ end
end
end
return unicode
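For the new methods 3 and 4 the code above computes a target size, either height plus depth or the design size scaled by 1.33^n, and then walks the character's next chain until a variant is at least that big (or the chain runs out). A self-contained sketch of that walk over a fake character chain:

-- fake vertical-variant chain: each entry points to a larger shape
local characters = {
    [0x28]  = { height =  6, depth = 2, next = 0x100 },
    [0x100] = { height =  9, depth = 3, next = 0x101 },
    [0x101] = { height = 14, depth = 5 },              -- largest available
}

-- method 3: the target size is (height+depth) * 1.33^n
local function big(characters,unicode,n)
    local c = characters[unicode]
    if not c or n <= 0 then
        return unicode
    end
    local size = (c.height + c.depth) * 1.33^n
    local next = c.next
    while next do
        local cn = characters[next]
        if (cn.height + cn.depth) >= size then
            return next                                -- big enough
        elseif cn.next then
            next = cn.next
        else
            return next                                -- settle for the largest one
        end
    end
    return unicode
end

print(big(characters,0x28,2))   -- 257 (0x101): 12 < 8*1.33^2 ≈ 14.2, but 19 is big enough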
@@ -491,10 +581,10 @@ end
--
-- function commands.taggedmathfunction(tag,label)
-- if label then
--- settexattribute(a_mathcategory,registercategory(1,tag,tag))
+-- texsetattribute(a_mathcategory,registercategory(1,tag,tag))
-- context.mathlabeltext(tag)
-- else
--- settexattribute(a_mathcategory,1)
+-- texsetattribute(a_mathcategory,1)
-- context(tag)
-- end
-- end
@@ -517,13 +607,13 @@ function commands.taggedmathfunction(tag,label,apply)
noffunctions = noffunctions + 1
functions[noffunctions] = tag
functions[tag] = noffunctions
- settexattribute(a_mathcategory,noffunctions + delta)
+ texsetattribute(a_mathcategory,noffunctions + delta)
else
- settexattribute(a_mathcategory,n + delta)
+ texsetattribute(a_mathcategory,n + delta)
end
context.mathlabeltext(tag)
else
- settexattribute(a_mathcategory,1000 + delta)
+ texsetattribute(a_mathcategory,1000 + delta)
context(tag)
end
end
@@ -542,6 +632,6 @@ function commands.resetmathattributes()
end
end
for i=1,#list do
- settexattribute(list[i],unsetvalue)
+ texsetattribute(list[i],unsetvalue)
end
end
diff --git a/Master/texmf-dist/tex/context/base/math-ini.mkiv b/Master/texmf-dist/tex/context/base/math-ini.mkiv
index 81ec339d658..dcd2a5c3343 100644
--- a/Master/texmf-dist/tex/context/base/math-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-ini.mkiv
@@ -44,6 +44,14 @@
\registerctxluafile{math-noa}{1.001}
\registerctxluafile{math-tag}{1.001}
\registerctxluafile{math-fbk}{1.001}
+\registerctxluafile{math-dir}{1.001}
+
+%D A few compatibility helpers:
+
+\def\Umathbotaccent{\Umathaccent \s!bottom }
+\def\Umathaccents {\Umathaccent \s!both }
+
+%D The attributes that we will use:
\definesystemattribute[mathalphabet] [public]
\definesystemattribute[mathsize] [public]
@@ -54,6 +62,7 @@
\definesystemattribute[mathcategory] [public]
\definesystemattribute[mathmode] [public]
\definesystemattribute[mathitalics] [public]
+\definesystemattribute[mathbidi] [public]
\definesystemattribute[displaymath] [public]
@@ -108,7 +117,7 @@
\installswitchcommandhandler \??mathematics {mathematics} \??mathematics
\unexpanded\def\startmathematics % no grouping, if ever then also an optional second
- {\doifnextoptionalelse\math_mathematics_start_yes\math_mathematics_start_nop}
+ {\doifnextoptionalcselse\math_mathematics_start_yes\math_mathematics_start_nop}
\unexpanded\def\math_mathematics_start_yes[#1]%
{\pushmacro\currentmathematics
@@ -277,9 +286,15 @@
\def\utfmathclassfiltered #1#2{\ctxcommand{utfmathclass (\!!bs#1\!!es,nil,"#2")}}
\def\utfmathcommandfiltered#1#2{\ctxcommand{utfmathcommand(\!!bs#1\!!es,nil,"#2")}}
+\def\utfmathcommandabove#1{\ctxcommand{utfmathcommandabove(\!!bs#1\!!es)}}
+\def\utfmathcommandbelow#1{\ctxcommand{utfmathcommandbelow(\!!bs#1\!!es)}}
+
\unexpanded\def\doifelseutfmathaccent #1{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es)}}
\unexpanded\def\doifelseutfmathaccentfiltered#1#2{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es,"#2")}}
+\unexpanded\def\doifelseutfmathabove #1{\ctxcommand{doifelseutfmathabove(\!!bs#1\!!es)}}
+\unexpanded\def\doifelseutfmathbelow #1{\ctxcommand{doifelseutfmathbelow(\!!bs#1\!!es)}}
+
%D Not used that much:
\installcorenamespace{mathcodecommand}
@@ -349,6 +364,46 @@
\unexpanded\def\mathcommand#1%
{\csname\??mathcommand#1\endcsname}
+%D Let's define a few commands here:
+
+%definemathcommand [mathstrut] {\vphantom{(}}
+%definemathcommand [joinrel] {\mathrel{\mkern-3mu}}
+\definemathcommand [joinrel] [rel] {\mkern-3mu}
+
+\chardef\c_math_strut"28
+
+\unexpanded\def\math_strut_htdp#1%
+ {\s!height\fontcharht#1\c_math_strut
+ \s!depth \fontchardp#1\c_math_strut}
+
+\unexpanded\def\math_strut_normal
+ {\vrule
+ \normalexpanded{\math_strut_htdp{\mathstylefont\normalmathstyle}}%
+ \s!width \zeropoint
+ \relax}
+
+\unexpanded\def\math_strut_visual
+ {\hskip-.01\emwidth
+ \vrule
+ \normalexpanded{\math_strut_htdp{\mathstylefont\normalmathstyle}}%
+ \s!width .02\emwidth
+ \relax
+ \hskip-.01\emwidth}
+
+\unexpanded\def\showmathstruts % let's not overload \math_strut_normal
+ {\let\math_strut\math_strut_visual}
+
+\let\math_strut\math_strut_normal
+
+% \unexpanded\def\mathstrut{\mathcodecommand{nothing}{\math_strut}}
+
+\definemathcommand [mathstrut] {\math_strut}
+
+%D We could have an arg variant \unknown\ but not now.
+
+\unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
+\unexpanded\def\stackrel #1#2{\mathrel{\mathop{#2}\limits^{#1}}}
+
%D Moved from font-ini.mkiv:
%D
%D \macros
@@ -388,6 +443,17 @@
{\ifdefined\normalhbox\else\let\normalhbox\hbox\fi % ?
\let\hbox\mbox}
+\unexpanded\def\snappedmath#1% sort of \struttedbox
+ {\dontleavehmode
+ \begingroup
+ \setbox\scratchbox\hbox\bgroup
+ \startimath#1\stopimath
+ \egroup
+ \ht\scratchbox\strutht
+   \dp\scratchbox\strutdp
+ \box\scratchbox
+ \endgroup}
+
 %D The next hack is needed for sine, cosine etc.
\let\mathfunction\firstofoneunexpanded
@@ -494,10 +560,6 @@
%D \tt test $\sin{(x^{\sin(x^{\sin(x)})})}$ test
%D \stoptyping
-%D Some goodies:
-
-\unexpanded\def\Angstrom{\nomathematics{\Aring}}
-
%D \macros
%D {nonknuthmode, donknuthmode}
%D
@@ -531,12 +593,13 @@
\def\activatemathcharacters
{\the\activatedmathcharacters}
-% \setnewconstant\primeasciicode 39 % '
+% beware, not runtime, so has to happen at format generation
\activatemathcharacter\circumflexasciicode
\activatemathcharacter\underscoreasciicode
\activatemathcharacter\ampersandasciicode
-\activatemathcharacter\primeasciicode
+
+% \activatemathcharacter\primeasciicode
% not used:
%
@@ -601,57 +664,57 @@
\Umathchardef\prime "0 "0 "2032
\fi
-\let\math_prime_indeed_normal\prime
-
-\appendtoks
- \let\math_prime_indeed_normal\prime % gets defined later
- \let\mathfontprime\prime % for tracing
- \let\prime\math_prime_indeed % so this is needed
-\to \everydump
-
-\unexpanded\def\math_prime_indeed
- {\iffontchar\textfont\zerocount"FE325\relax
- ^\bgroup
- \expandafter\math_prime_indeed_virtual % virtual mess (using funny signal)
- \else
- % \expandafter\math_prime_indeed_normal % gets collapsed
- \expandafter\math_prime_indeed_crapped % gets collapsed
- \fi}
-
-\def\math_prime_indeed_crapped
- {{^{\math_prime_indeed_normal}}}
-
-\let\prime\math_prime_indeed
-
-\def\math_prime_indeed_virtual
- {\math_prime_indeed_normal
- \futurelet\nexttoken\math_prime_indeed_choice}
-
-\installcorenamespace{mathprime}
-
-\def\math_prime_indeed_choice
- {\csname\??mathprime
- \ifx '\nexttoken a\else
- \ifx \math_prime_indeed_normal\nexttoken a\else
- \ifx \prime\nexttoken a\else
- \ifx\superscriptcircumflextoken\nexttoken b\else
- \ifx\othercircumflextoken \nexttoken b\else
- c\fi\fi\fi\fi\fi
- \endcsname}
-
-\setvalue{\??mathprime a}#1{\math_prime_indeed_virtual}
-\setvalue{\??mathprime b}#1#2{#2\egroup}
-\setvalue{\??mathprime c}{\egroup}
-
-\let\activemathprime\math_prime_indeed
-
-\bgroup
-
- \catcode\primeasciicode\activecatcode
-
- \global\everymathematics\expandafter{\the\everymathematics\let'\math_prime_indeed} % todo: do this at the lua end
-
-\egroup
+% \let\math_prime_indeed_normal\prime
+%
+% \appendtoks
+% \let\math_prime_indeed_normal\prime % gets defined later
+% \let\mathfontprime\prime % for tracing
+% \let\prime\math_prime_indeed % so this is needed
+% \to \everydump
+%
+% \unexpanded\def\math_prime_indeed
+% {\iffontchar\textfont\zerocount"FE325\relax
+% ^\bgroup
+% \expandafter\math_prime_indeed_virtual % virtual mess (using funny signal)
+% \else
+% % \expandafter\math_prime_indeed_normal % gets collapsed
+% \expandafter\math_prime_indeed_crapped % gets collapsed
+% \fi}
+%
+% \def\math_prime_indeed_crapped
+% {{^{\math_prime_indeed_normal}}}
+%
+% % \let\prime\math_prime_indeed
+%
+% \def\math_prime_indeed_virtual
+% {\math_prime_indeed_normal
+% \futurelet\nexttoken\math_prime_indeed_choice}
+%
+% \installcorenamespace{mathprime}
+%
+% \def\math_prime_indeed_choice
+% {\csname\??mathprime
+% \ifx '\nexttoken a\else
+% \ifx \math_prime_indeed_normal\nexttoken a\else
+% \ifx \prime\nexttoken a\else
+% \ifx\superscriptcircumflextoken\nexttoken b\else
+% \ifx\othercircumflextoken \nexttoken b\else
+% c\fi\fi\fi\fi\fi
+% \endcsname}
+%
+% \setvalue{\??mathprime a}#1{\math_prime_indeed_virtual}
+% \setvalue{\??mathprime b}#1#2{#2\egroup}
+% \setvalue{\??mathprime c}{\egroup}
+%
+% \let\activemathprime\math_prime_indeed
+%
+% \bgroup
+%
+% \catcode\primeasciicode\activecatcode
+%
+% \global\everymathematics\expandafter{\the\everymathematics\let'\math_prime_indeed} % todo: do this at the lua end
+%
+% \egroup
\bgroup
@@ -672,8 +735,8 @@
\newconditional \knuthmode
-\let\nonknuthmode\relax
-\let\donknuthmode\relax
+\let\nonknuthmode\relax % no longer needed in MkIV
+\let\donknuthmode\relax % no longer needed in MkIV
% \def\nonknuthmode
% {\pushcatcodetable
@@ -784,7 +847,34 @@
\setupmathematics
[\c!compact=no]
-%D Arabic:
+% \enabletrackers[typesetters.directions.math]
+
+%D Right||to||left typesetting in math is supported by the \type {align} parameter,
+%D in combination with the \type {bidi} parameter. Of course support for special symbols
+%D like square roots depends on the font as well. We probably need to mirror a few
+%D more characters.
+%D
+%D \startbuffer
+%D \removeunwantedspaces
+%D \m{ ( 1 = 1) }\quad
+%D \m{ (123 = 123) }\quad
+%D \m{ a ( 1 = 1) b }\quad
+%D \m{ a (123 = 123) b }\quad
+%D \m{ x = 123 y + (1 / \sqrt {x}) }
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D \starttabulate[|T|T||]
+%D \HL
+%D \NC align \NC bidi \NC \NC \NR
+%D \HL
+%D \NC l2r \NC no \NC \setupmathematics[bidi=no] \getbuffer \NC \NR
+%D \NC l2r \NC yes \NC \setupmathematics[bidi=yes] \getbuffer \NC \NR
+%D \NC r2l \NC no \NC \setupmathematics[align=r2l,bidi=no] \getbuffer \NC \NR
+%D \NC r2l \NC yes \NC \setupmathematics[align=r2l,bidi=yes] \getbuffer \NC \NR
+%D \HL
+%D \stoptabulate
\newconditional\c_math_right_to_left
@@ -797,9 +887,28 @@
\appendtoks
\math_basics_synchronize_direction
-%to \everymathematics % comes too late and I'm not in the mood for a mixed mode kludge now
+%to \everymathematics % comes too late and I'm not in the mood for a mixed mode kludge now (should be a property of beginmath nodes and passed to callbacks)
+\to \everyswitchmathematics
+
+% experimental (needed for an article)
+
+\installcorenamespace {mathbidi}
+
+\newcount\c_math_bidi
+
+\setvalue{\??mathbidi\v!no }{\ctxcommand{setmathdirection(0)}\c_math_bidi\attributeunsetvalue}
+\setvalue{\??mathbidi\v!yes}{\ctxcommand{setmathdirection(1)}\c_math_bidi\plusone}
+
+\appendtoks
+ \edef\p_bidi{\mathematicsparameter\c!bidi}%
+ \csname\??mathbidi\ifcsname\??mathbidi\p_bidi\endcsname\p_bidi\else\v!no\fi\endcsname
+\to \everysetupmathematics
+
+\appendtoks
+ \attribute\mathbidiattribute\ifconditional\c_math_right_to_left\c_math_bidi\else\attributeunsetvalue\fi
\to \everyswitchmathematics
+
%D Delayed: greek.
%D
%D \starttyping
@@ -1008,6 +1117,9 @@
% \global\mathcode\c_math_period\c_math_special
% \to \everyjob
+% \activatemathcharacter\c_math_comma
+% \activatemathcharacter\c_math_period
+
\appendtoks
\mathcode\c_math_comma \c_math_special
\mathcode\c_math_period\c_math_special
@@ -1153,6 +1265,20 @@
\expandafter#3\else
\expandafter#1\fi}
+\unexpanded\def\verbosemathstyle#1% #1 is number (\normalmathstyle)
+ {{\normalexpanded{\relax\darkgray\ttxx[\number#1:\ifcase\numexpr#1\relax
+ display\or % 0
+ crampeddisplay\or % 1
+ text\or % 2
+ crampedtext\or % 3
+ script\or % 4
+ crampedscript\or % 5
+ scriptscript\or % 6
+ crampedscriptscript\else % 7
+ unknown\fi]}}}
+
+\unexpanded\def\showmathstyle{\verbosemathstyle\normalmathstyle}
+
%D A plain inheritance:
\def\mathpalette#1#2%
@@ -1170,7 +1296,9 @@
%D
%D \typebuffer \getbuffer
-\unexpanded\def\mathstylehbox#1%
+% to be tested: {#1} but it could have side effects
+
+\unexpanded\def\mathstylehbox#1% sensitive for: a \over b => {a\over b} or \frac{a}{b}
{\normalexpanded{\hbox\bgroup
\startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
@@ -1616,6 +1744,7 @@
\def\math_text_choice% if needed we can get rid of the normalize (predo in font code)
{\normalizebodyfontsize\m_math_text_choice_face{\mathstyleface\normalmathstyle}%
+ %\showmathstyle
\hbox\bgroup\font_basics_switchtobodyfont\m_math_text_choice_face\let\next}
%D Safeguard against redefinitions:
@@ -1639,7 +1768,7 @@
%
% \def\displ@y
% {\global\dt@ptrue
-% \openup\displayopenupvalue % was \openup\jot
+% \math_openup\displayopenupvalue % was \openup\jot
% \everycr
% {\noalign
% {\ifdt@p
@@ -1660,7 +1789,7 @@
\unexpanded\def\math_display_align_hack % I don't like the global, maybe we should push and pop
{\global\let\math_display_align_hack_indeed\math_display_align_hack_remove_skip
- \openup\displayopenupvalue % was \openup\jot
+ \math_openup\displayopenupvalue % was \math_openup\jot
\everycr{\noalign{\math_display_align_hack_indeed}}}
\def\math_display_align_hack_remove_skip
@@ -1723,3 +1852,20 @@
\def\mathhorizontalcode#1#2{\ctxcommand{horizontalcode(\number#1,\number#2)}}
\protect \endinput
+
+% % not used (yet)
+%
+% \newtoks \everystartimath
+% \newtoks \everystopimath
+%
+% \unexpanded\def\startimath{\Ustartmath\the\everystartimath}
+% \unexpanded\def\stopimath {\the\everystopimath\Ustopmath}
+%
+% \unexpanded\def\m%
+% {\relax
+% \ifmmode\expandafter\math_m_stay\else\expandafter\math_m_math\fi}
+%
+% \unexpanded\def\math_m_math#1%
+% {\startimath#1\stopimath}
+%
+% \let\math_m_stay\firstofoneargument
diff --git a/Master/texmf-dist/tex/context/base/math-map.lua b/Master/texmf-dist/tex/context/base/math-map.lua
index a0d7457d199..8d301ac3336 100644
--- a/Master/texmf-dist/tex/context/base/math-map.lua
+++ b/Master/texmf-dist/tex/context/base/math-map.lua
@@ -21,6 +21,8 @@ if not modules then modules = { } end modules ['math-map'] = {
-- todo: alphabets namespace
-- maybe: script/scriptscript dynamic,
+-- superscripted primes get unscripted!
+
-- to be looked into once the fonts are ready (will become font
-- goodie):
--
@@ -36,11 +38,15 @@ local merged = table.merged
local extract = bit32.extract
local allocate = utilities.storage.allocate
-local texattribute = tex.attribute
+
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
+
local setmetatableindex = table.setmetatableindex
+local texgetattribute = tex.getattribute
+local texsetattribute = tex.setattribute
+
local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
local report_remapping = logs.reporter("mathematics","remapping")
@@ -54,30 +60,30 @@ local mathematics = mathematics
-- it otherwise.
mathematics.gaps = {
- [0x1D455] = 0x0210E, -- H
- [0x1D49D] = 0x0212C, -- script B
- [0x1D4A0] = 0x02130, -- script E
- [0x1D4A1] = 0x02131, -- script F
- [0x1D4A3] = 0x0210B, -- script H
- [0x1D4A4] = 0x02110, -- script I
- [0x1D4A7] = 0x02112, -- script L
- [0x1D4A8] = 0x02133, -- script M
- [0x1D4AD] = 0x0211B, -- script R
- [0x1D4BA] = 0x0212F, -- script e
- [0x1D4BC] = 0x0210A, -- script g
- [0x1D4C4] = 0x02134, -- script o
- [0x1D506] = 0x0212D, -- fraktur C
- [0x1D50B] = 0x0210C, -- fraktur H
- [0x1D50C] = 0x02111, -- fraktur I
- [0x1D515] = 0x0211C, -- fraktur R
- [0x1D51D] = 0x02128, -- fraktur Z
- [0x1D53A] = 0x02102, -- bb C
- [0x1D53F] = 0x0210D, -- bb H
- [0x1D545] = 0x02115, -- bb N
- [0x1D547] = 0x02119, -- bb P
- [0x1D548] = 0x0211A, -- bb Q
- [0x1D549] = 0x0211D, -- bb R
- [0x1D551] = 0x02124, -- bb Z
+ [0x1D455] = 0x0210E, -- ℎ h
+ [0x1D49D] = 0x0212C, -- ℬ script B
+ [0x1D4A0] = 0x02130, -- ℰ script E
+ [0x1D4A1] = 0x02131, -- ℱ script F
+ [0x1D4A3] = 0x0210B, -- ℋ script H
+ [0x1D4A4] = 0x02110, -- ℐ script I
+ [0x1D4A7] = 0x02112, -- ℒ script L
+ [0x1D4A8] = 0x02133, -- ℳ script M
+ [0x1D4AD] = 0x0211B, -- ℛ script R
+ [0x1D4BA] = 0x0212F, -- ℯ script e
+ [0x1D4BC] = 0x0210A, -- ℊ script g
+ [0x1D4C4] = 0x02134, -- ℴ script o
+ [0x1D506] = 0x0212D, -- ℭ fraktur C
+ [0x1D50B] = 0x0210C, -- ℌ fraktur H
+ [0x1D50C] = 0x02111, -- ℑ fraktur I
+ [0x1D515] = 0x0211C, -- ℜ fraktur R
+ [0x1D51D] = 0x02128, -- ℨ fraktur Z
+ [0x1D53A] = 0x02102, -- ℂ bb C
+ [0x1D53F] = 0x0210D, -- ℍ bb H
+ [0x1D545] = 0x02115, -- ℕ bb N
+ [0x1D547] = 0x02119, -- ℙ bb P
+ [0x1D548] = 0x0211A, -- ℚ bb Q
+ [0x1D549] = 0x0211D, -- ℝ bb R
+ [0x1D551] = 0x02124, -- ℤ bb Z
}
local function fillinmathgaps(tfmdata,key,value)
@@ -132,6 +138,7 @@ local regular_tf = {
},
symbols = {
[0x2202]=0x2202, [0x2207]=0x2207,
+ [0x0027]=0x2032, -- prime
},
}
@@ -164,6 +171,7 @@ local regular_it = {
},
symbols = {
[0x2202]=0x1D715, [0x2207]=0x1D6FB,
+ [0x0027]=0x2032, -- prime
},
}
@@ -189,6 +197,7 @@ local regular_bf= {
},
symbols = {
[0x2202]=0x1D6DB, [0x2207]=0x1D6C1,
+ [0x0027]=0x2032, -- prime
},
}
@@ -214,6 +223,7 @@ local regular_bi = {
},
symbols = {
[0x2202]=0x1D74F, [0x2207]=0x1D735,
+ [0x0027]=0x2032, -- prime
},
}
@@ -264,6 +274,7 @@ local sansserif_bf = {
},
symbols = {
[0x2202]=0x1D789, [0x2207]=0x1D76F,
+ [0x0027]=0x2032, -- prime
},
}
@@ -289,6 +300,7 @@ local sansserif_bi = {
},
symbols = {
[0x2202]=0x1D7C3, [0x2207]=0x1D7A9,
+ [0x0027]=0x2032, -- prime
},
}
@@ -333,7 +345,8 @@ local blackboard_tf = {
[0x0393]=0x0213E, [0x03A0]=0x0213F,
},
symbols = { -- sum
- [0x2211]=0x02140,
+ [0x2211]=0x02140,
+ [0x0027]=0x2032, -- prime
},
}
@@ -524,7 +537,7 @@ function mathematics.getboth(alphabet,style)
end
function mathematics.getstyle(style)
- local r = mathremap[texattribute[mathalphabet]]
+ local r = mathremap[texgetattribute(mathalphabet)]
local alphabet = r and r.alphabet or "regular"
local data = alphabets[alphabet][style]
return data and data.attribute
@@ -533,22 +546,22 @@ end
function mathematics.syncboth(alphabet,style)
local data = alphabet and alphabets[alphabet] or regular
data = style and data[style] or data.tf
- texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
+    texsetattribute(mathalphabet,data and data.attribute or texgetattribute(mathalphabet))
end
function mathematics.syncstyle(style)
- local r = mathremap[texattribute[mathalphabet]]
+ local r = mathremap[texgetattribute(mathalphabet)]
local alphabet = r and r.alphabet or "regular"
local data = alphabets[alphabet][style]
- texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
+    texsetattribute(mathalphabet,data and data.attribute or texgetattribute(mathalphabet))
end
function mathematics.syncname(alphabet)
-- local r = mathremap[mathalphabet]
- local r = mathremap[texattribute[mathalphabet]]
+ local r = mathremap[texgetattribute(mathalphabet)]
local style = r and r.style or "tf"
local data = alphabets[alphabet][style]
- texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
+ texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
end
local islcgreek = regular_tf.lcgreek
diff --git a/Master/texmf-dist/tex/context/base/math-noa.lua b/Master/texmf-dist/tex/context/base/math-noa.lua
index b309ba0777d..a7f0fcf55e6 100644
--- a/Master/texmf-dist/tex/context/base/math-noa.lua
+++ b/Master/texmf-dist/tex/context/base/math-noa.lua
@@ -20,6 +20,7 @@ if not modules then modules = { } end modules ['math-noa'] = {
local utfchar, utfbyte = utf.char, utf.byte
local formatters = string.formatters
+local div = math.div
local fonts, nodes, node, mathematics = fonts, nodes, node, mathematics
@@ -53,16 +54,34 @@ local report_families = logs.reporter("mathematics","families")
local a_mathrendering = attributes.private("mathrendering")
local a_exportstatus = attributes.private("exportstatus")
-local mlist_to_hlist = node.mlist_to_hlist
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+local tonut = nuts.tonut
+local nutstring = nuts.tostring
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local free_node = nuts.free
+local new_node = nuts.new -- todo: pool: math_noad math_sub
+local copy_node = nuts.copy
+
+local mlist_to_hlist = nodes.mlist_to_hlist
+
local font_of_family = node.family_font
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local free_node = node.free
-local new_node = node.new -- todo: pool: math_noad math_sub
-local new_kern = nodes.pool.kern
-local new_rule = nodes.pool.rule
-local concat_nodes = nodes.concat
+local new_kern = nodepool.kern
+local new_rule = nodepool.rule
local topoints = number.points
@@ -75,7 +94,8 @@ local fontemwidths = fonthashes.emwidths
local fontexheights = fonthashes.exheights
local variables = interfaces.variables
-local texattribute = tex.attribute
+local texsetattribute = tex.setattribute
+local texgetattribute = tex.getattribute
local unsetvalue = attributes.unsetvalue
local chardata = characters.data
@@ -118,27 +138,29 @@ local hlist_code = nodecodes.hlist
local glyph_code = nodecodes.glyph
local left_fence_code = 1
+local right_fence_code = 3
local function process(start,what,n,parent)
if n then n = n + 1 else n = 0 end
+ local prev = nil
while start do
- local id = start.id
+ local id = getid(start)
if trace_processing then
if id == math_noad then
- report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
+ report_processing("%w%S, class %a",n*2,nutstring(start),noadcodes[getsubtype(start)])
elseif id == math_char then
- local char = start.char
- local fam = start.fam
+ local char = getchar(start)
+ local fam = getfield(start,"fam")
local font = font_of_family(fam)
- report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
+ report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,nutstring(start),fam,font,char,char)
else
- report_processing("%w%S",n*2,start)
+ report_processing("%w%S",n*2,nutstring(start))
end
end
local proc = what[id]
if proc then
-- report_processing("start processing")
- local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev
+ local done, newstart = proc(start,what,n,parent) -- prev is bugged: or getprev(start)
if newstart then
start = newstart
-- report_processing("stop processing (new start)")
@@ -148,52 +170,57 @@ local function process(start,what,n,parent)
elseif id == math_char or id == math_textchar or id == math_delim then
break
elseif id == math_noad then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
+if prev then
+ -- we have no proper prev in math nodes yet
+ setfield(start,"prev",prev)
+end
+
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
elseif id == math_box or id == math_sub then
- -- local noad = start.list if noad then process(noad,what,n,start) end -- list
- local noad = start.head if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"list") if noad then process(noad,what,n,start) end -- list (not getlist !)
elseif id == math_fraction then
- local noad = start.num if noad then process(noad,what,n,start) end -- list
- noad = start.denom if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.right if noad then process(noad,what,n,start) end -- delimiter
+ local noad = getfield(start,"num") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"denom") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
+ noad = getfield(start,"right") if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_choice then
- local noad = start.display if noad then process(noad,what,n,start) end -- list
- noad = start.text if noad then process(noad,what,n,start) end -- list
- noad = start.script if noad then process(noad,what,n,start) end -- list
- noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"display") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"text") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"script") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"scriptscript") if noad then process(noad,what,n,start) end -- list
elseif id == math_fence then
- local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
+ local noad = getfield(start,"delim") if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_radical then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.degree if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
+ noad = getfield(start,"degree") if noad then process(noad,what,n,start) end -- list
elseif id == math_accent then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.accent if noad then process(noad,what,n,start) end -- list
- noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"accent") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"bot_accent") if noad then process(noad,what,n,start) end -- list
elseif id == math_style then
-- has a next
else
-- glue, penalty, etc
end
- start = start.next
+prev = start
+ start = getnext(start)
end
end
local function processnoads(head,actions,banner)
if trace_processing then
report_processing("start %a",banner)
- process(head,actions)
+ process(tonut(head),actions)
report_processing("stop %a",banner)
else
- process(head,actions)
+ process(tonut(head),actions)
end
end
@@ -224,54 +251,112 @@ local familymap = { [0] =
"pseudobold",
}
+-- families[math_char] = function(pointer)
+-- if getfield(pointer,"fam") == 0 then
+-- local a = getattr(pointer,a_mathfamily)
+-- if a and a > 0 then
+-- setattr(pointer,a_mathfamily,0)
+-- if a > 5 then
+-- local char = getchar(pointer)
+-- local bold = boldmap[char]
+-- local newa = a - 3
+-- if bold then
+-- setattr(pointer,a_exportstatus,char)
+-- setfield(pointer,"char",bold)
+-- if trace_families then
+-- report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
+-- end
+-- else
+-- if trace_families then
+-- report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
+-- end
+-- end
+-- setfield(pointer,"fam",newa)
+-- else
+-- if trace_families then
+-- local char = getchar(pointer)
+-- report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
+-- end
+-- setfield(pointer,"fam",a)
+-- end
+-- else
+-- -- pointer.fam = 0
+-- end
+-- end
+-- end
+
families[math_char] = function(pointer)
- if pointer.fam == 0 then
- local a = pointer[a_mathfamily]
+ if getfield(pointer,"fam") == 0 then
+ local a = getattr(pointer,a_mathfamily)
if a and a > 0 then
- pointer[a_mathfamily] = 0
+ setattr(pointer,a_mathfamily,0)
if a > 5 then
- local char = pointer.char
+ local char = getchar(pointer)
local bold = boldmap[char]
local newa = a - 3
- if bold then
- pointer[a_exportstatus] = char
- pointer.char = bold
+ if not bold then
if trace_families then
- report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
+ report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
+ end
+ setfield(pointer,"fam",newa)
+ elseif not fontcharacters[font_of_family(newa)][bold] then
+ if trace_families then
+ report_families("no bold character for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
+ end
+ if newa > 3 then
+ setfield(pointer,"fam",newa-3)
end
else
+ setattr(pointer,a_exportstatus,char)
+ setfield(pointer,"char",bold)
if trace_families then
- report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
+ report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
end
+ setfield(pointer,"fam",newa)
end
- pointer.fam = newa
else
- if trace_families then
- local char = pointer.char
- report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
+ local char = getchar(pointer)
+ if not fontcharacters[font_of_family(a)][char] then
+ if trace_families then
+ report_families("no bold replacement for %C",char)
+ end
+ else
+ if trace_families then
+ report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
+ end
+ setfield(pointer,"fam",a)
end
- pointer.fam = a
end
- else
- -- pointer.fam = 0
end
end
end
families[math_delim] = function(pointer)
- if pointer.small_fam == 0 then
- local a = pointer[a_mathfamily]
+ if getfield(pointer,"small_fam") == 0 then
+ local a = getattr(pointer,a_mathfamily)
if a and a > 0 then
- pointer[a_mathfamily] = 0
+ setattr(pointer,a_mathfamily,0)
if a > 5 then
-- no bold delimiters in unicode
a = a - 3
end
- pointer.small_fam = a
- pointer.large_fam = a
+ local char = getfield(pointer,"small_char")
+ local okay = fontcharacters[font_of_family(a)][char]
+ if okay then
+ setfield(pointer,"small_fam",a)
+ elseif a > 2 then
+ setfield(pointer,"small_fam",a-3)
+ end
+ local char = getfield(pointer,"large_char")
+ local okay = fontcharacters[font_of_family(a)][char]
+ if okay then
+ setfield(pointer,"large_fam",a)
+ elseif a > 2 then
+ setfield(pointer,"large_fam",a-3)
+ end
else
- pointer.small_fam = 0
- pointer.large_fam = 0
+ setfield(pointer,"small_fam",0)
+ setfield(pointer,"large_fam",0)
end
end
end
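The families handlers above now check, before switching to a (pseudo) bold family, that the bold glyph is actually present in the target font, and degrade the family instead when it is not. A small sketch of that guard with made-up tables (the family number stands in for font_of_family here):

local fontcharacters = {                           -- toy: family -> glyphs present
    [4] = { [0x1D6FC] = true, [0x1D736] = true },  -- has bold italic alpha
    [5] = { [0x1D6FC] = true },                    -- bold glyph missing
}

local boldmap = { [0x1D6FC] = 0x1D736 }            -- toy: italic alpha -> bold italic alpha

local function remapfamily(char,a)                 -- a > 5: a bold alphabet was requested
    local bold = boldmap[char]
    local newa = a - 3                             -- pseudo bold family maps onto a real one
    if not bold then
        return newa, char                          -- nothing to substitute
    elseif not fontcharacters[newa][bold] then
        return newa > 3 and newa - 3 or newa, char -- glyph missing: degrade further
    else
        return newa, bold                          -- safe: keep the family, use the bold shape
    end
end

print(remapfamily(0x1D6FC,7))   -- 4  120630: bold glyph found in family 4
print(remapfamily(0x1D6FC,8))   -- 2  120572: missing in family 5, so degrade to 2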
@@ -299,8 +384,8 @@ local fallbackstyleattr = mathematics.fallbackstyleattr
local setnodecolor = nodes.tracers.colors.set
local function checked(pointer)
- local char = pointer.char
- local fam = pointer.fam
+ local char = getchar(pointer)
+ local fam = getfield(pointer,"fam")
local id = font_of_family(fam)
local tc = fontcharacters[id]
if not tc[char] then
@@ -313,27 +398,27 @@ local function checked(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer[a_exportstatus] = char -- testcase: exponentiale
- pointer.char = newchar
+ setattr(pointer,a_exportstatus,char) -- testcase: exponentiale
+ setfield(pointer,"char",newchar)
return true
end
end
end
processors.relocate[math_char] = function(pointer)
- local g = pointer[a_mathgreek] or 0
- local a = pointer[a_mathalphabet] or 0
+ local g = getattr(pointer,a_mathgreek) or 0
+ local a = getattr(pointer,a_mathalphabet) or 0
if a > 0 or g > 0 then
if a > 0 then
- pointer[a_mathgreek] = 0
+ setattr(pointer,a_mathgreek,0)
end
if g > 0 then
- pointer[a_mathalphabet] = 0
+ setattr(pointer,a_mathalphabet,0)
end
- local char = pointer.char
+ local char = getchar(pointer)
local newchar = remapalphabets(char,a,g)
if newchar then
- local fam = pointer.fam
+ local fam = getfield(pointer,"fam")
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters[newchar] then
@@ -343,7 +428,7 @@ processors.relocate[math_char] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer.char = newchar
+ setfield(pointer,"char",newchar)
return true
else
local fallback = fallbackstyleattr(a)
@@ -357,7 +442,7 @@ processors.relocate[math_char] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer.char = newchar
+ setfield(pointer,"char",newchar)
return true
elseif trace_remapping then
report_remap("char",id,char,newchar," fails (no fallback character)")
@@ -403,19 +488,19 @@ processors.render = { }
local rendersets = mathematics.renderings.numbers or { } -- store
processors.render[math_char] = function(pointer)
- local attr = pointer[a_mathrendering]
+ local attr = getattr(pointer,a_mathrendering)
if attr and attr > 0 then
- local char = pointer.char
+ local char = getchar(pointer)
local renderset = rendersets[attr]
if renderset then
local newchar = renderset[char]
if newchar then
- local fam = pointer.fam
+ local fam = getfield(pointer,"fam")
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters and characters[newchar] then
- pointer.char = newchar
- pointer[a_exportstatus] = char
+ setfield(pointer,"char",newchar)
+ setattr(pointer,a_exportstatus,char)
end
end
end
@@ -442,16 +527,20 @@ local mathsize = attributes.private("mathsize")
local resize = { } processors.resize = resize
resize[math_fence] = function(pointer)
- if pointer.subtype == left_fence_code then
- local a = pointer[mathsize]
+ local subtype = getsubtype(pointer)
+ if subtype == left_fence_code or subtype == right_fence_code then
+ local a = getattr(pointer,mathsize)
if a and a > 0 then
- pointer[mathsize] = 0
- local d = pointer.delim
- local df = d.small_fam
- local id = font_of_family(df)
- if id > 0 then
- local ch = d.small_char
- d.small_char = mathematics.big(fontdata[id],ch,a)
+ local method, size = div(a,100), a % 100
+ setattr(pointer,mathsize,0)
+ local delimiter = getfield(pointer,"delim")
+ local chr = getfield(delimiter,"small_char")
+ if chr > 0 then
+ local fam = getfield(delimiter,"small_fam")
+ local id = font_of_family(fam)
+ if id > 0 then
+ setfield(delimiter,"small_char",mathematics.big(fontdata[id],chr,size,method))
+ end
end
end
end
@@ -462,147 +551,34 @@ function handlers.resize(head,style,penalties)
return true
end
--- respacing
-
--- local mathpunctuation = attributes.private("mathpunctuation")
---
--- local respace = { } processors.respace = respace
-
--- only [nd,ll,ul][po][nd,ll,ul]
-
--- respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... and then parent
--- pointer = parent
--- if pointer and pointer.subtype == noad_ord then
--- local a = pointer[mathpunctuation]
--- if a and a > 0 then
--- pointer[mathpunctuation] = 0
--- local current_nucleus = pointer.nucleus
--- if current_nucleus.id == math_char then
--- local current_char = current_nucleus.char
--- local fc = chardata[current_char]
--- fc = fc and fc.category
--- if fc == "nd" or fc == "ll" or fc == "lu" then
--- local next_noad = pointer.next
--- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then
--- local next_nucleus = next_noad.nucleus
--- if next_nucleus.id == math_char then
--- local next_char = next_nucleus.char
--- local nc = chardata[next_char]
--- nc = nc and nc.category
--- if nc == "po" then
--- local last_noad = next_noad.next
--- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then
--- local last_nucleus = last_noad.nucleus
--- if last_nucleus.id == math_char then
--- local last_char = last_nucleus.char
--- local lc = chardata[last_char]
--- lc = lc and lc.category
--- if lc == "nd" or lc == "ll" or lc == "lu" then
--- local ord = new_node(math_noad) -- todo: pool
--- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr
--- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil
--- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count
--- --~ next_noad.attr = nil
--- ord.next = last_noad
--- pointer.next = ord
--- free_node(next_noad)
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
-
--- local comma = 0x002C
--- local period = 0x002E
---
--- respace[math_char] = function(pointer,what,n,parent)
--- pointer = parent
--- if pointer and pointer.subtype == noad_punct then
--- local current_nucleus = pointer.nucleus
--- if current_nucleus.id == math_char then
--- local current_nucleus = pointer.nucleus
--- if current_nucleus.id == math_char then
--- local current_char = current_nucleus.char
--- local a = pointer[mathpunctuation]
--- if not a or a == 0 then
--- if current_char == comma then
--- -- default tex: 2,5 or 2, 5 --> 2, 5
--- elseif current_char == period then
--- -- default tex: 2.5 or 2. 5 --> 2.5
--- pointer.subtype = noad_ord
--- end
--- elseif a == 1 then
--- local next_noad = pointer.next
--- if next_noad and next_noad.id == math_noad then
--- local next_nucleus = next_noad.nucleus
--- if next_nucleus.id == math_char and next_nucleus.char == 0 then
--- nodes.remove(pointer,next_noad,true)
--- end
--- if current_char == comma then
--- -- default tex: 2,5 or 2, 5 --> 2, 5
--- elseif current_char == period then
--- -- default tex: 2.5 or 2. 5 --> 2.5
--- pointer.subtype = noad_ord
--- end
--- end
--- elseif a == 2 then
--- if current_char == comma or current_char == period then
--- local next_noad = pointer.next
--- if next_noad and next_noad.id == math_noad then
--- local next_nucleus = next_noad.nucleus
--- if next_nucleus.id == math_char and next_nucleus.char == 0 then
--- if current_char == comma then
--- -- adaptive: 2, 5 --> 2, 5
--- elseif current_char == period then
--- -- adaptive: 2. 5 --> 2. 5
--- end
--- nodes.remove(pointer,next_noad,true)
--- else
--- if current_char == comma then
--- -- adaptive: 2,5 --> 2,5
--- pointer.subtype = noad_ord
--- elseif current_char == period then
--- -- adaptive: 2.5 --> 2.5
--- pointer.subtype = noad_ord
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
---
--- function handlers.respace(head,style,penalties)
--- processnoads(head,respace,"respace")
--- return true
--- end
-
--- The following code is dedicated to Luigi Scarso who pointed me
--- to the fact that \not= is not producing valid pdf-a code.
--- The code does not solve this for virtual characters but it does
--- a decent job on collapsing so that fonts that have the right
--- glyph will have a decent unicode point. In the meantime this code
--- has been moved elsewhere.
-
local collapse = { } processors.collapse = collapse
local mathpairs = characters.mathpairs
-mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034 } -- (prime,prime) (prime,doubleprime)
-mathpairs[0x2033] = { [0x2032] = 0x2034 } -- (doubleprime,prime)
+mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime)
+mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime)
+mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime)
+
+mathpairs[0x2035] = { [0x2035] = 0x2036, [0x2036] = 0x2037 } -- (reversedprime,reversedprime) (reversedprime,doublereversedprime)
+mathpairs[0x2036] = { [0x2035] = 0x2037 } -- (doublereversedprime,reversedprime)
mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
mathpairs[0x222C] = { [0x222B] = 0x222D }
-mathpairs[0x007C] = { [0x007C] = 0x2016 } -- double bars
+mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple
+mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple
+
+local movesub = {
+ -- primes
+ [0x2032] = 0xFE932,
+ [0x2033] = 0xFE933,
+ [0x2034] = 0xFE934,
+ [0x2057] = 0xFE957,
+ -- reverse primes
+ [0x2035] = 0xFE935,
+ [0x2036] = 0xFE936,
+ [0x2037] = 0xFE937,
+}
local validpair = {
[noad_rel] = true,
@@ -612,48 +588,79 @@ local validpair = {
[noad_opnolimits] = true,
}
-local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on and off
+local function movesubscript(parent,current_nucleus,current_char)
+    local prev = getfield(parent,"prev")
+    if prev and getid(prev) == math_noad then
+        if not getfield(prev,"sup") and not getfield(prev,"sub") then
+            -- {f} {'}_n => f_n^'
+            setfield(current_nucleus,"char",movesub[current_char or getchar(current_nucleus)])
+            local nucleus = getfield(parent,"nucleus")
+            local sub = getfield(parent,"sub")
+            local sup = getfield(parent,"sup")
+            setfield(prev,"sup",nucleus)
+            setfield(prev,"sub",sub)
+            local dummy = copy_node(nucleus)
+            setfield(dummy,"char",0)
+            setfield(parent,"nucleus",dummy)
+            setfield(parent,"sub",nil)
+            if trace_collapsing then
+                report_collapsing("fixing subscript")
+            end
+        end
+    end
+end
+
+local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
if parent then
- if validpair[parent.subtype] then
- local current_nucleus = parent.nucleus
- if not parent.sub and not parent.sup and current_nucleus.id == math_char then
- local current_char = current_nucleus.char
- local mathpair = mathpairs[current_char]
- if mathpair then
- local next_noad = parent.next
- if next_noad and next_noad.id == math_noad then
- if validpair[next_noad.subtype] then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
- local newchar = mathpair[next_char]
- if newchar then
- local fam = current_nucleus.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
- if characters and characters[newchar] then
- if trace_collapsing then
- report_collapsing("%U + %U => %U",current_char,next_char,newchar)
- end
- current_nucleus.char = newchar
- local next_next_noad = next_noad.next
- if next_next_noad then
- parent.next = next_next_noad
- next_next_noad.prev = parent
- else
- parent.next = nil
+ if validpair[getsubtype(parent)] then
+ local current_nucleus = getfield(parent,"nucleus")
+ if getid(current_nucleus) == math_char then
+ local current_char = getchar(current_nucleus)
+ if not getfield(parent,"sub") and not getfield(parent,"sup") then
+ local mathpair = mathpairs[current_char]
+ if mathpair then
+ local next_noad = getnext(parent)
+ if next_noad and getid(next_noad) == math_noad then
+ if validpair[getsubtype(next_noad)] then
+ local next_nucleus = getfield(next_noad,"nucleus")
+ if getid(next_nucleus) == math_char then
+ local next_char = getchar(next_nucleus)
+ local newchar = mathpair[next_char]
+ if newchar then
+ local fam = getfield(current_nucleus,"fam")
+ local id = font_of_family(fam)
+ local characters = fontcharacters[id]
+ if characters and characters[newchar] then
+ if trace_collapsing then
+ report_collapsing("%U + %U => %U",current_char,next_char,newchar)
+ end
+ setfield(current_nucleus,"char",newchar)
+ local next_next_noad = getnext(next_noad)
+ if next_next_noad then
+ setfield(parent,"next",next_next_noad)
+ setfield(next_next_noad,"prev",parent)
+ else
+ setfield(parent,"next",nil)
+ end
+ setfield(parent,"sup",getfield(next_noad,"sup"))
+ setfield(parent,"sub",getfield(next_noad,"sub"))
+ setfield(next_noad,"sup",nil)
+ setfield(next_noad,"sub",nil)
+ free_node(next_noad)
+ collapsepair(pointer,what,n,parent,true)
+ if not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus)
+ end
end
- parent.sup = next_noad.sup
- parent.sub = next_noad.sub
- next_noad.sup = nil
- next_noad.sub = nil
- free_node(next_noad)
- collapsepair(pointer,what,n,parent)
end
end
end
end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
end
end
end
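
collapsepair chains lookups in mathpairs until no adjacent pair matches any
more, and movesubscript then migrates a prime that still carries a subscript
onto the previous noad ({f} {'}_n becomes f_n^'). A small stand-alone sketch of
the pair chaining on a flat list of codepoints; the table entries are copied
from the patch, the collapse function itself is hypothetical:

    local mathpairs = {
        [0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 },
        [0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 },
        [0x2034] = { [0x2032] = 0x2057 },
    }

    -- collapse adjacent codepoints left to right, like collapsepair does
    -- on the noad list
    local function collapse(list)
        local i = 1
        while i < #list do
            local pair = mathpairs[list[i]]
            local new  = pair and pair[list[i+1]]
            if new then
                list[i] = new
                table.remove(list,i+1) -- stay on the same slot and retry
            else
                i = i + 1
            end
        end
        return list
    end

    -- prime prime prime collapses to one triple prime (U+2034)
    print(("U+%04X"):format(collapse({ 0x2032, 0x2032, 0x2032 })[1]))
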
@@ -678,13 +685,13 @@ local replaced = { }
local function replace(pointer,what,n,parent)
pointer = parent -- we're following the parent list (chars trigger this)
- local next = pointer.next
+ local next = getnext(pointer)
local start_super, stop_super, start_sub, stop_sub
local mode = "unset"
- while next and next.id == math_noad do
- local nextnucleus = next.nucleus
- if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then
- local char = nextnucleus.char
+ while next and getid(next) == math_noad do
+ local nextnucleus = getfield(next,"nucleus")
+ if nextnucleus and getid(nextnucleus) == math_char and not getfield(next,"sub") and not getfield(next,"sup") then
+ local char = getchar(nextnucleus)
local s = superscripts[char]
if s then
if not start_super then
@@ -694,8 +701,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_super = next
- next = next.next
- nextnucleus.char = s
+ next = getnext(next)
+ setfield(nextnucleus,"char",s)
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("superscript %C becomes %C",char,s)
@@ -710,8 +717,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_sub = next
- next = next.next
- nextnucleus.char = s
+ next = getnext(next)
+ setfield(nextnucleus,"char",s)
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("subscript %C becomes %C",char,s)
@@ -726,29 +733,29 @@ local function replace(pointer,what,n,parent)
end
if start_super then
if start_super == stop_super then
- pointer.sup = start_super.nucleus
+ setfield(pointer,"sup",getfield(start_super,"nucleus"))
else
local list = new_node(math_sub) -- todo attr
- list.head = start_super
- pointer.sup = list
+ setfield(list,"list",start_super)
+ setfield(pointer,"sup",list)
end
if mode == "super" then
- pointer.next = stop_super.next
+ setfield(pointer,"next",getnext(stop_super))
end
- stop_super.next = nil
+ setfield(stop_super,"next",nil)
end
if start_sub then
if start_sub == stop_sub then
- pointer.sub = start_sub.nucleus
+ setfield(pointer,"sub",getfield(start_sub,"nucleus"))
else
local list = new_node(math_sub) -- todo attr
- list.head = start_sub
- pointer.sub = list
+ setfield(list,"list",start_sub)
+ setfield(pointer,"sub",list)
end
if mode == "sub" then
- pointer.next = stop_sub.next
+ setfield(pointer,"next",getnext(stop_sub))
end
- stop_sub.next = nil
+ setfield(stop_sub,"next",nil)
end
-- we could return stop
end
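
The replace pass gathers trailing noads whose characters are Unicode super- or
subscript forms, rewrites each character to the one it represents, and attaches
the run as the sup or sub field of the preceding noad. The superscripts and
subscripts tables it consults are defined elsewhere in ConTeXt; the sketch
below only shows the kind of mapping involved, with a handful of entries that
follow directly from Unicode:

    -- illustrative excerpt only; the real tables cover the full ranges
    local superscripts = {
        [0x00B2] = 0x32, -- superscript two   -> "2"
        [0x00B3] = 0x33, -- superscript three -> "3"
        [0x2070] = 0x30, -- superscript zero  -> "0"
        [0x2074] = 0x34, -- superscript four  -> "4"
    }

    local subscripts = {
        [0x2080] = 0x30, -- subscript zero -> "0"
        [0x2081] = 0x31, -- subscript one  -> "1"
    }

    local function scriptkind(char)
        if superscripts[char] then
            return "sup", superscripts[char]
        elseif subscripts[char] then
            return "sub", subscripts[char]
        end
    end

    print(scriptkind(0x00B2)) -- sup  50
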
@@ -824,25 +831,25 @@ function mathematics.setalternate(fam,tag)
local mathalternates = tfmdata.shared and tfmdata.shared.mathalternates
if mathalternates then
local m = mathalternates[tag]
- tex.attribute[a_mathalternate] = m and m.attribute or unsetvalue
+ texsetattribute(a_mathalternate,m and m.attribute or unsetvalue)
end
end
alternate[math_char] = function(pointer)
- local a = pointer[a_mathalternate]
+ local a = getattr(pointer,a_mathalternate)
if a and a > 0 then
- pointer[a_mathalternate] = 0
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ setattr(pointer,a_mathalternate,0)
+ local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
if mathalternatesattributes then
local what = mathalternatesattributes[a]
- local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
+ local alt = getalternate(tfmdata,getchar(pointer),what.feature,what.value)
if alt then
if trace_alternates then
report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
- tostring(what.feature),tostring(what.value),pointer.char,alt)
+ tostring(what.feature),tostring(what.value),getchar(pointer),alt)
end
- pointer.char = alt
+ setfield(pointer,"char",alt)
end
end
end
@@ -929,13 +936,14 @@ end
local function insert_kern(current,kern)
local sub = new_node(math_sub) -- todo: pool
local noad = new_node(math_noad) -- todo: pool
- sub.head = kern
- kern.next = noad
- noad.nucleus = current
+ setfield(sub,"list",kern)
+ setfield(kern,"next",noad)
+ setfield(noad,"nucleus",current)
return sub
end
local setcolor = nodes.tracers.colors.set
+local resetcolor = nodes.tracers.colors.reset
local italic_kern = new_kern
local c_positive_d = "trace:db"
local c_negative_d = "trace:dr"
@@ -947,11 +955,8 @@ trackers.register("math.italics", function(v)
if k > 0 then
return setcolor(new_rule(k,ex,ex),c_positive_d)
else
- return concat_nodes {
- old_kern(k),
- setcolor(new_rule(-k,ex,ex),c_negative_d),
- old_kern(k),
- }
+ -- influences un*
+ return old_kern(k) .. setcolor(new_rule(-k,ex,ex),c_negative_d) .. old_kern(k)
end
end
else
@@ -960,44 +965,44 @@ trackers.register("math.italics", function(v)
end)
italics[math_char] = function(pointer,what,n,parent)
- local method = pointer[a_mathitalics]
+ local method = getattr(pointer,a_mathitalics)
if method and method > 0 then
- local char = pointer.char
- local font = font_of_family(pointer.fam) -- todo: table
+ local char = getchar(pointer)
+ local font = font_of_family(getfield(pointer,"fam")) -- todo: table
local correction, visual = getcorrection(method,font,char)
if correction then
- local pid = parent.id
+ local pid = getid(parent)
local sub, sup
if pid == math_noad then
- sup = parent.sup
- sub = parent.sub
+ sup = getfield(parent,"sup")
+ sub = getfield(parent,"sub")
end
if sup or sub then
- local subtype = parent.subtype
+ local subtype = getsubtype(parent)
if subtype == noad_oplimits then
if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
+ setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
end
end
if sub then
local correction = - correction
- parent.sub = insert_kern(sub,italic_kern(correction,font))
+ setfield(parent,"sub",insert_kern(sub,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
end
end
else
if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
+ setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
end
end
end
else
- local next_noad = parent.next
+ local next_noad = getnext(parent)
if not next_noad then
if n== 1 then -- only at the outer level .. will become an option (always,endonly,none)
if trace_italics then
@@ -1005,12 +1010,12 @@ italics[math_char] = function(pointer,what,n,parent)
end
insert_node_after(parent,parent,italic_kern(correction,font))
end
- elseif next_noad.id == math_noad then
- local next_subtype = next_noad.subtype
+ elseif getid(next_noad) == math_noad then
+ local next_subtype = getsubtype(next_noad)
if next_subtype == noad_punct or next_subtype == noad_ord then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
+ local next_nucleus = getfield(next_noad,"nucleus")
+ if getid(next_nucleus) == math_char then
+ local next_char = getchar(next_nucleus)
local next_data = chardata[next_char]
local visual = next_data.visual
if visual == "it" or visual == "bi" then
@@ -1063,14 +1068,14 @@ function mathematics.setitalics(n)
enable()
end
if n == variables.reset then
- texattribute[a_mathitalics] = unsetvalue
+ texsetattribute(a_mathitalics,unsetvalue)
else
- texattribute[a_mathitalics] = tonumber(n) or unsetvalue
+ texsetattribute(a_mathitalics,tonumber(n) or unsetvalue)
end
end
function mathematics.resetitalics()
- texattribute[a_mathitalics] = unsetvalue
+ texsetattribute(a_mathitalics,unsetvalue)
end
-- variants
@@ -1094,15 +1099,15 @@ local validvariants = { -- fast check on valid
}
variants[math_char] = function(pointer,what,n,parent) -- also set export value
- local char = pointer.char
+ local char = getchar(pointer)
local selector = validvariants[char]
if selector then
- local next = parent.next
- if next and next.id == math_noad then
- local nucleus = next.nucleus
- if nucleus and nucleus.id == math_char and nucleus.char == selector then
+ local next = getnext(parent)
+ if next and getid(next) == math_noad then
+ local nucleus = getfield(next,"nucleus")
+ if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
local variant
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
local mathvariants = tfmdata.resources.variants -- and variantdata
if mathvariants then
mathvariants = mathvariants[selector]
@@ -1111,8 +1116,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
end
end
if variant then
- pointer.char = variant
- pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
+ setfield(pointer,"char",variant)
+ setattr(pointer,a_exportstatus,char) -- we don't export the variant as it's visual markup
if trace_variants then
report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
end
@@ -1121,8 +1126,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
report_variants("no variant (%U,%U)",char,selector)
end
end
- next.prev = pointer
- parent.next = next.next
+ setfield(next,"prev",pointer)
+ setfield(parent,"next",getnext(next))
free_node(next)
end
end
@@ -1134,6 +1139,50 @@ function handlers.variants(head,style,penalties)
return true
end
+-- for manuals
+
+local classes = { }
+
+local colors = {
+ [noadcodes.rel] = "trace:dr",
+ [noadcodes.ord] = "trace:db",
+ [noadcodes.bin] = "trace:dg",
+ [noadcodes.open] = "trace:dm",
+ [noadcodes.close] = "trace:dm",
+ [noadcodes.punct] = "trace:dc",
+ -- [noadcodes.opdisplaylimits] = "",
+ -- [noadcodes.oplimits] = "",
+ -- [noadcodes.opnolimits] = "",
+ -- [noadcodes.inner] = "",
+ -- [noadcodes.under] = "",
+ -- [noadcodes.over] = "",
+ -- [noadcodes.vcenter] = "",
+}
+
+classes[math_char] = function(pointer,what,n,parent)
+ local color = colors[getsubtype(parent)]
+ if color then
+ setcolor(pointer,color)
+ else
+ resetcolor(pointer)
+ end
+end
+
+function handlers.classes(head,style,penalties)
+ processnoads(head,classes,"classes")
+ return true
+end
+
+trackers.register("math.classes",function(v) tasks.setaction("math","noads.handlers.classes",v) end)
+
+-- just for me
+
+function handlers.showtree(head,style,penalties)
+ inspect(nodes.totree(head))
+end
+
+trackers.register("math.showtree",function(v) tasks.setaction("math","noads.handlers.showtree",v) end)
+
-- the normal builder
function builders.kernel.mlist_to_hlist(head,style,penalties)
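
Both new handlers are hooked into the math task list through trackers, so they
stay disabled unless explicitly requested; in a running ConTeXt session they
can presumably be switched on like any other tracker:

    -- enabling the new visual aids from the Lua end; the TeX side would use
    -- \enabletrackers[math.classes] and \enabletrackers[math.showtree]
    trackers.enable("math.classes")  -- color noads by their class
    trackers.enable("math.showtree") -- dump the noad tree with inspect
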
diff --git a/Master/texmf-dist/tex/context/base/math-pln.mkiv b/Master/texmf-dist/tex/context/base/math-pln.mkiv
index b862bb4cb40..5e4c43c81a2 100644
--- a/Master/texmf-dist/tex/context/base/math-pln.mkiv
+++ b/Master/texmf-dist/tex/context/base/math-pln.mkiv
@@ -87,13 +87,31 @@
\unexpanded\def\pmatrix#1%
{\left(\matrix{#1}\right)}
-\unexpanded\def\openup
- {\afterassignment\math_openup\scratchdimen=}
-
-\def\math_openup
- {\advance\lineskip \scratchdimen
- \advance\baselineskip \scratchdimen
- \advance\lineskiplimit\scratchdimen}
+% \unexpanded\def\openup
+% {\afterassignment\math_openup\scratchdimen=}
+%
+% \def\math_openup
+% {\advance\lineskip \scratchdimen
+% \advance\baselineskip \scratchdimen
+% \advance\lineskiplimit\scratchdimen}
+
+\let\math_closeup\relax
+
+\unexpanded\def\math_openup
+ {\afterassignment\math_openup_indeed\scratchdimen}
+
+\def\math_openup_indeed
+ {\unexpanded\edef\math_closeup
+ {\lineskip \the\lineskip
+ \baselineskip \the\baselineskip
+ \lineskiplimit\the\lineskiplimit
+ \relax}%
+ \advance \lineskip \scratchdimen
+ \advance \baselineskip \scratchdimen
+ \advance \lineskiplimit \scratchdimen}
+
+\let\openup \math_openup
+\def\closeup{\math_closeup} % dynamic
\unexpanded\def\displaylines#1%
{\the\mathdisplayaligntweaks
diff --git a/Master/texmf-dist/tex/context/base/math-rad.mkvi b/Master/texmf-dist/tex/context/base/math-rad.mkvi
new file mode 100644
index 00000000000..027b5c27d50
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/math-rad.mkvi
@@ -0,0 +1,290 @@
+%D \module
+%D [ file=math-rad,
+%D version=2013.07.13,
+%D title=\CONTEXT\ Math Macros,
+%D subtitle=Radicals,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Math Macros / Radicals}
+
+\unprotect
+
+%D \starttyping
+%D $\sqrt[3]{10}$
+%D \stoptyping
+%D
+%D This root command will be overloaded later:
+
+%D Old stuff:
+
+% \def\rootradical{\Uroot \defaultmathfamily "221A } % can be done in char-def
+% \def\surdradical{\Uradical \defaultmathfamily "221A } % can be done in char-def
+
+\def\root#1\of{\rootradical{#1}} % #2
+
+\unexpanded\def\sqrt{\doifnextoptionalcselse\rootwithdegree\rootwithoutdegree}
+
+\def\styledrootradical#1#2% so that \text works ok ... \rootradical behaves somewhat weird
+ {\normalexpanded{\rootradical{\normalunexpanded{#1}}{\noexpand\triggermathstyle{\normalmathstyle}\normalunexpanded{#2}}}}
+
+\def\rootwithdegree[#1]{\rootradical{#1}}
+\def\rootwithoutdegree {\rootradical {}}
+
+%D Even older stuff:
+
+% % is now an ordinary character
+%
+% \let\normalsurd\surd % \Uradical "0 "221A
+% \unexpanded\def\surd{\normalsurd{}}
+
+
+%D The real thing:
+
+\installcorenamespace{mathradical}
+\installcorenamespace{mathradicalalternative}
+
+\installcommandhandler \??mathradical {mathradical} \??mathradical
+
+\setupmathradical
+ [\c!alternative=\v!normal,
+ \c!mpoffset=.25\exheight]
+
+\appendtoks
+ \setuevalue{\currentmathradical}{\math_radical_handle{\currentmathradical}}
+\to \everydefinemathradical
+
+\unexpanded\def\math_radical_handle#tag%
+ {\begingroup
+ \edef\currentmathradical{#tag}%
+ \doifnextoptionalcselse\math_radical_degree_yes\math_radical_degree_nop}
+
+\def\math_radical_alternative{\csname\??mathradicalalternative\mathradicalparameter\c!alternative\endcsname}
+
+\def\m_math_no_degree{{}}
+
+\def\math_radical_degree_yes[#degree]{\edef\currentmathradicaldegree{#degree}\math_radical_indeed}
+\def\math_radical_degree_nop {\let\currentmathradicaldegree\m_math_no_degree\math_radical_indeed}
+
+\def\math_radical_indeed#body%
+ {\math_radical_alternative{#body}\endgroup}
+
+\setvalue{\??mathradicalalternative\v!default}% #body%
+ {\rootradical{\currentmathradicaldegree}} % {#body}}
+
+\setvalue{\??mathradicalalternative\v!normal}#body%
+ {\edef\p_color{\mathradicalparameter\c!color}%
+ \ifx\p_color\empty
+ \styledrootradical{\currentmathradicaldegree}{#body}% {} really needed as \rootradical expands first
+ \else\ifx\currentmathradicaldegree\empty
+ \pushcolor[\p_color]%
+ \styledrootradical{\currentmathradicaldegree}%
+ {\popcolor#body}%
+ \else
+ \pushcolor[\p_color]%
+ \styledrootradical{\popcolor\currentmathradicaldegree\pushcolor[\p_color]}%
+ {\popcolor#body}%
+ \fi\fi}
+
+% As I had a long-standing wish to see a proper final root element I decided
+% to make one of my own.
+%
+% \startMPcode
+% path p ; p := unitsquare xysized(4cm,1cm) ;
+%
+% path q ; q := boundingbox p enlarged (bbheight(p)/10) ;
+% numeric h ; h := bbheight(q) ;
+%
+% draw p ;
+% draw
+% llcorner q shifted (-h/2,h/2) --
+% llcorner q shifted (-h/4,0) --
+% ulcorner q --
+% urcorner q --
+% urcorner q shifted (0,-h/10) ;
+% \stopMPcode
+%
+% \startMPextensions
+% vardef math_root(expr w,h,d,o) =
+% path q ; q := boundingbox unitsquare xysized(w,h) enlarged (o);
+% llcorner q shifted (-h/2,h/2) --
+% llcorner q shifted (-h/4,-d) --
+% ulcorner q --
+% urcorner q --
+% urcorner q shifted (0,-h/10)
+% enddef ;
+% \stopMPextensions
+%
+% \startuniqueMPgraphic{root}{width,height,depth,offset,linewidth}
+% pickup pencircle scaled \MPvar{linewidth} ;
+% draw math_root(\MPvar{width},\MPvar{height},\MPvar{depth},\MPvar{offset}) ;
+% \stopuniqueMPgraphic
+%
+% \unexpanded\def\sqrt#1%
+% {\begingroup
+% \setbox\scratchbox\mathstylehbox{#1}%
+% \scratchoffset\MPrawvar{root}{offset}%.25\exheight
+% \scratchwidth \wd\scratchbox
+% \scratchheight\ht\scratchbox
+% \scratchdepth \dp\scratchbox
+% \setbox2=\hbox\bgroup % todo: tag this box as sqrt
+% \uniqueMPgraphic
+% {root}%
+% {width=\the\scratchwidth,%
+% depth=\the\scratchdepth,%
+% height=\the\scratchheight,%
+% offset=\the\scratchoffset,
+% linewidth=\the\linewidth}%
+% \egroup
+% \scratchdimen\wd2
+% \lower\dimexpr\scratchoffset+\scratchdepth\relax\box2
+% \hskip-\scratchdimen
+% \hbox to \scratchdimen{\hss\box\scratchbox\hskip\scratchoffset}%
+% \endgroup}
+
+\startMPextensions
+ vardef math_radical_simple(expr w,h,d,o) =
+ (-h/2-o,h/2-o) --
+ (-h/4-o,-d-o) --
+ (-o,h+o) --
+ (w+o,h+o) --
+ (w+o,h-h/10+o)
+ enddef ;
+\stopMPextensions
+
+\startuniqueMPgraphic{math:radical:default}%{...}
+ draw
+ math_radical_simple(OverlayWidth,OverlayHeight,OverlayDepth,OverlayOffset)
+ withpen pencircle xscaled (2OverlayLineWidth) yscaled (3OverlayLineWidth/4) rotated 30
+ % dashed evenly
+ withcolor OverlayLineColor ;
+\stopuniqueMPgraphic
+
+% todo: spacing .. this is just an experiment (article driven)
+
+\setvalue{\??mathradicalalternative\v!mp}#body% we could use dowithnextbox
+ {\begingroup
+ \scratchoffset\mathradicalparameter\c!mpoffset
+ \setbox\nextbox\mathstylehbox{#body}%
+ % we use the \overlay variables as these are passes anyway and
+ % it's more efficient than using parameters
+ \edef\overlaywidth {\the\wd\nextbox}%
+ \edef\overlayheight {\the\ht\nextbox}%
+ \edef\overlaydepth {\the\dp\nextbox}%
+ \edef\overlayoffset {\the\scratchoffset}%
+ \edef\overlaylinewidth{\the\linewidth}%
+ \edef\overlaylinecolor{\mathradicalparameter\c!color}%
+ %
+ \edef\p_mp{\mathradicalparameter\c!mp}%
+ %
+ \setbox\scratchbox\hbox\bgroup % todo: tag this box as sqrt
+ \uniqueMPgraphic
+ {\p_mp}%
+ %{...}%
+ \egroup
+ \scratchdimen \wd\scratchbox
+ \scratchtopoffset \dimexpr\scratchoffset+\dp\nextbox\relax
+ \scratchbottomoffset\dimexpr\scratchoffset+\ht\nextbox/2\relax
+ \hbox to \scratchdimen{\hss\box\nextbox\hskip\scratchoffset}%
+ \hskip-\scratchdimen
+ \lower\dimexpr\scratchtopoffset\box\scratchbox%
+ \ifx\currentmathradicaldegree\empty \else
+ \setbox\scratchbox\mathstylehbox{\scriptscriptstyle\currentmathradicaldegree\hss}%
+ \wd\scratchbox\scratchdimen
+ \hskip-\scratchdimen
+ \raise\dimexpr\scratchbottomoffset\box\scratchbox
+ \fi
+ \endgroup}
+
+\definemathradical[sqrt][mp=math:radical:default]
+
+% \setupmathradical[sqrt][alternative=normal,color=darkblue]
+% \setupmathradical[sqrt][alternative=mp,color=darkgreen]
+
+%D Because I wanted to illustrate some more fun stuff, another mechanism
+%D is provided as well ... let's put some dangerous tools in the hands of
+%D math jugglers like Aditya.
+
+\installcorenamespace{mathornament}
+\installcorenamespace{mathornamentalternative}
+
+\installcommandhandler \??mathornament {mathornament} \??mathornament
+
+\setupmathornament
+ [\c!alternative=\v!mp, % currently mp only .. maybe some day layer too
+ \c!mpoffset=.25\exheight]
+
+\appendtoks
+ \setuevalue{\currentmathornament}{\math_ornament_handle{\currentmathornament}}
+\to \everydefinemathornament
+
+\unexpanded\def\math_ornament_handle#tag#body%
+ {\begingroup
+ \edef\currentmathornament{#tag}%
+ \csname\??mathornamentalternative\mathornamentparameter\c!alternative\endcsname{#body}%
+ \endgroup}
+
+\setvalue{\??mathornamentalternative\v!mp}#body% we could use dowithnextbox
+ {\begingroup
+ \scratchoffset\mathornamentparameter\c!mpoffset
+ \setbox\nextbox\mathstylehbox{#body}%
+ \edef\overlaywidth {\the\wd\nextbox}%
+ \edef\overlayheight {\the\ht\nextbox}%
+ \edef\overlaydepth {\the\dp\nextbox}%
+ \edef\overlayoffset {\the\scratchoffset}%
+ \edef\overlaylinewidth{\the\linewidth}%
+ \edef\overlaylinecolor{\mathornamentparameter\c!color}%
+ \edef\p_mp{\mathornamentparameter\c!mp}%
+ % the width of the graphic determines the width of the final result
+ \setbox\scratchbox\hbox{\uniqueMPgraphic{\p_mp}}% todo: add code key + tag
+ \scratchdimen \wd\scratchbox
+ % \scratchtopoffset \dimexpr\scratchoffset+\dp\nextbox\relax
+ % \scratchbottomoffset\dimexpr\scratchoffset+\ht\nextbox/2\relax
+ \hbox to \scratchdimen{\hss\box\nextbox\hss}%
+ \hskip-\scratchdimen
+ \box\scratchbox
+ \endgroup}
+
+% \startMPextensions
+% vardef math_ornament_hat(expr w,h,d,o,l) =
+% image ( path p ; p :=
+% (w/2,h + 10l) --
+% (o + w,h + o) --
+% (w/2,h + 7l) --
+% (-o,h + o) --
+% cycle ;
+% fill p ;
+% setbounds currentpicture to (-o,0) -- (w+o,0) -- (w+o,h+2o) -- (-o,h+2o) -- cycle ;
+% )
+% enddef ;
+% \stopMPextensions
+%
+% \startuniqueMPgraphic{math:ornament:hat}
+% draw
+% math_ornament_hat(
+% OverlayWidth,
+% OverlayHeight,
+% OverlayDepth,
+% OverlayOffset,
+% OverlayLineWidth
+% )
+% withpen
+% pencircle
+% xscaled (2OverlayLineWidth)
+% yscaled (3OverlayLineWidth/4)
+% rotated 30
+% withcolor
+% OverlayLineColor ;
+% draw boundingbox currentpicture;
+% \stopuniqueMPgraphic
+%
+% \definemathornament [mathhat] [mp=math:ornament:hat]
+%
+% \dorecurse{8}{$\mathhat{\blackrule[width=#1ex,color=gray]}$ }
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/math-ren.lua b/Master/texmf-dist/tex/context/base/math-ren.lua
index 2e7dba13df9..5c4c1336944 100644
--- a/Master/texmf-dist/tex/context/base/math-ren.lua
+++ b/Master/texmf-dist/tex/context/base/math-ren.lua
@@ -63,7 +63,3 @@ mathematics.renderset = renderset
function commands.mathrenderset(list)
context(renderset(list))
end
-
--- function commands.setmatrendering(list)
--- tex.setattribute(renderset(list))
--- end
diff --git a/Master/texmf-dist/tex/context/base/math-stc.mkvi b/Master/texmf-dist/tex/context/base/math-stc.mkvi
index 2dc2b2c22c1..76a07db5c68 100644
--- a/Master/texmf-dist/tex/context/base/math-stc.mkvi
+++ b/Master/texmf-dist/tex/context/base/math-stc.mkvi
@@ -48,6 +48,10 @@
%D
%D In the end we have a more flexible mechanism which also handles text variants.
+%D When wrapping up some math developments I decided to add mp support here
+%D as well. A nice evening job with Joe Bonamassa performing live on the big
+%D screen (really nice blurays). See meta-imp-mat.mkiv for examples.
+
% possible improvements:
%
% - we could skip the left/right offsets when offset=normal, this saves some access time
@@ -56,27 +60,40 @@
\installcorenamespace {mathextensiblefallbacks}
+% currently no italic correction ... the problem is that we don't know yet if we have an
+% italic below, so we need to postpone
+
\def\math_stackers_fallback
{\hbox to \scratchwidth{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname}}
%{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname }
+% \def\math_stackers_regular
+% {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}}
+
\def\math_stackers_regular
- {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}}
+ {\mathstylehbox{\usemathstackerscolorparameter\c!color
+ \Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}}
\def\math_stackers_stretch % we don't have that one yet
- {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}}
+ {\mathstylehbox{\usemathstackerscolorparameter\c!color
+ \Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}}
+
+% these delimiters are unusable as they don't center for small arguments:
+%
+% $\Umathaccent 0 0 "2190{x}$ \par $\Umathaccent 0 0 "27F8{x}$\par
+% $\Udelimiterunder 0 "2190{x}$ \par $\Udelimiterunder 0 "27F8{x}$\par
\setvalue{\??mathextensiblefallbacks}{\hbox{\vrule\!!width\scratchwidth\!!height.1\exheight\!!depth\zeropoint}}
-\def\math_stackers_with_fallback#codepoint%
- {\begingroup
- \scratchunicode#codepoint\relax
- \ifcase\mathextensiblecode\fam\scratchunicode\relax
- \math_stackers_fallback
- \else
- \math_stackers_stretch
- \fi
- \endgroup}
+% \def\math_stackers_with_fallback#codepoint%
+% {\begingroup
+% \scratchunicode#codepoint\relax
+% \ifcase\mathextensiblecode\fam\scratchunicode\relax
+% \math_stackers_fallback
+% \else
+% \math_stackers_stretch
+% \fi
+% \endgroup}
%D We don't really need this because we can assume that fonts have the right
%D extensibles. If needed I will make a general virtual extender for \OPENTYPE\
@@ -114,6 +131,12 @@
[%c!alternative=\v!text, % text | mathematics
\c!left=,
\c!right=,
+ \c!mathclass=\s!rel,
+ \c!alternative=\v!normal,
+ \c!mp=math:stacker:\number\scratchunicode,
+ \c!mpheight=\exheight,
+ \c!mpdepth=\exheight,
+ \c!mpoffset=.25\exheight,
\c!voffset=.25\exheight,
\c!hoffset=.5\emwidth,
\c!minheight=\exheight,
@@ -121,7 +144,7 @@
\c!minwidth=\emwidth,
\c!order=\v!normal,
\c!strut=,
- %\c!color=, % todo: when I need it
+ \c!color=, % todo: when I need it
\c!topcommand=,
\c!middlecommand=,
\c!bottomcommand=,
@@ -132,6 +155,7 @@
%D top of the baseline by default.
\installcorenamespace {mathstackerslocation}
+\installcorenamespace {mathstackersalternative}
\letvalue{\??mathstackerslocation\v!top }\plusone % on top of baseline
\letvalue{\??mathstackerslocation\v!high }\plustwo % 25 % down
@@ -172,6 +196,23 @@
\math_stackers_fallback
\fi}
+% no checking, we assume sane use
+
+\letvalue{\??mathstackersalternative\v!normal }\math_stackers_content
+\letvalue{\??mathstackersalternative\v!default}\math_stackers_content
+
+\setvalue{\??mathstackersalternative\v!mp}%
+ {\hbox\bgroup % todo: add code key + tag
+ \edef\overlaywidth {\the\scratchwidth}%
+ \edef\overlayheight {\the\dimexpr\mathstackersparameter\c!mpheight}%
+ \edef\overlaydepth {\the\dimexpr\mathstackersparameter\c!mpdepth}%
+ \edef\overlayoffset {\the\dimexpr\mathstackersparameter\c!mpoffset}%
+ \edef\overlaylinewidth{\the\linewidth}%
+ \edef\overlaylinecolor{\mathstackersparameter\c!color}%
+ \edef\p_mp{\mathstackersparameter\c!mp}%
+ \uniqueMPgraphic{\p_mp}%
+ \egroup}
+
\def\math_stackers_check_unicode#codepoint%
{\scratchunicode#codepoint\relax
\scratchhoffset\mathstackersparameter\c!hoffset\relax
@@ -223,11 +264,12 @@
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
- \ifmmode\mathrel\else\dontleavehmode\fi
- {\edef\p_offset {\mathstackersparameter\c!offset}%
- \edef\p_location{\mathstackersparameter\c!location}%
- \edef\p_order {\mathstackersparameter\c!order}%
- \edef\p_strut {\mathstackersparameter\c!strut}%
+ \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
+ {\edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location {\mathstackersparameter\c!location}%
+ \edef\p_order {\mathstackersparameter\c!order}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ \edef\p_alternative{\mathstackersparameter\c!alternative}%
\ifx\p_order\v!reverse
\edef\m_math_stackers_text_top {#bottomtext}%
\edef\m_math_stackers_text_bottom{#toptext}%
@@ -285,17 +327,17 @@
\advance\scratchwidth2\scratchhoffset
%
\ifcase#method\relax
- \setbox\scratchboxthree\math_stackers_content
+ \setbox\scratchboxthree\csname\??mathstackersalternative\p_alternative\endcsname
\fi
%
\ifdim\wd\scratchboxone<\scratchwidth
- \setbox\scratchboxone\hbox to \scratchwidth{\hss\box\scratchboxone\hss}%
+ \setbox\scratchboxone\hbox to \scratchwidth{\hss\unhbox\scratchboxone\hss}% unhboxing makes leaders work
\fi
\ifdim\wd\scratchboxtwo<\scratchwidth
- \setbox\scratchboxtwo\hbox to \scratchwidth{\hss\box\scratchboxtwo\hss}%
+ \setbox\scratchboxtwo\hbox to \scratchwidth{\hss\unhbox\scratchboxtwo\hss}%
\fi
\ifdim\wd\scratchboxthree<\scratchwidth
- \setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
+ \setbox\scratchboxthree\hbox to \scratchwidth{\hss\unhbox\scratchboxthree\hss}%
\fi
%
\ifcsname\??mathstackerslocation\p_location\endcsname
@@ -395,17 +437,30 @@
%D The next one deals with under and over extensibles (arrows mostly):
-\unexpanded\def\math_stackers_double#where#category#codepoint#text%
+\installcorenamespace {mathclasses}
+
+\letvalue{\??mathclasses }\mathord
+\letvalue{\??mathclasses rel}\mathrel
+\letvalue{\??mathclasses ord}\mathord
+
+\def\math_class_by_parameter#1%
+ {\normalexpanded{\noexpand\math_class_by_parameter_indeed{#1\c!mathclass}}}
+
+\def\math_class_by_parameter_indeed#1%
+ {\csname\??mathclasses\ifcsname\??mathclasses#1\endcsname#1\fi\endcsname}
+
+\unexpanded\def\math_stackers_make_double#top#bottom#category#codepoint#codeextra#text%
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
- \ifmmode\mathrel\else\dontleavehmode\fi
+ \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\currentmathstackers{#category}%
\edef\m_math_stackers_text_middle {#text}%
%
- \edef\p_offset {\mathstackersparameter\c!offset}%
- \edef\p_location{\mathstackersparameter\c!location}%
- \edef\p_strut {\mathstackersparameter\c!strut}%
+ \edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location {\mathstackersparameter\c!location}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ \edef\p_alternative{\mathstackersparameter\c!alternative}%
%
\scratchleftoffset \zeropoint
\scratchrightoffset\zeropoint
@@ -425,7 +480,7 @@
\fi
\advance\scratchwidth2\scratchhoffset
%
- \setbox\scratchboxtwo \math_stackers_content
+ \setbox\scratchboxtwo \csname\??mathstackersalternative\p_alternative\endcsname
\setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
%
\math_stackers_normalize_three
@@ -436,49 +491,74 @@
%
\ifdim\htdp\scratchboxtwo>\zeropoint
\kern-\scratchwidth
- \ifcase#where\relax
+ \ifcase#top\else
\math_stackers_top\bgroup
- \raise\dimexpr\scratchheight+\scratchtopoffset\relax
+ % \raise\dimexpr\scratchheight+\scratchtopoffset\relax
+ \raise\dimexpr\scratchheight+\mathstackersparameter\c!voffset\relax
\box\scratchboxtwo
\egroup
- \else
+ \fi
+ \scratchunicode#codeextra\relax
+ \ifcase\scratchunicode\else
+ \kern-\scratchwidth
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ \fi
+ \ifcase#bottom\else
\math_stackers_bottom\bgroup
- \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax
+ % \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax
+ \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\mathstackersparameter\c!voffset\relax
\box\scratchboxtwo
\egroup
\fi
\fi}%
\mathstackersparameter\c!right\relax
- \endgroup}
-
-\unexpanded\def\definemathoverextensible {\dotripleempty\math_extensiblies_define_over }
-\unexpanded\def\definemathunderextensible{\dotripleempty\math_extensiblies_define_under}
-
-\def\math_extensiblies_define_over[#1][#2][#3]%
+ \edef\p_limits{\mathstackersparameter\c!mathlimits}%
+ \ifx\p_limits\v!yes
+ \expandafter\endgroup\expandafter\limits
+ \else
+ \expandafter\endgroup
+ \fi}
+
+\unexpanded\def\definemathoverextensible {\dotripleempty \math_extensibles_define_over }
+\unexpanded\def\definemathunderextensible {\dotripleempty \math_extensibles_define_under}
+\unexpanded\def\definemathdoubleextensible{\doquadrupleempty\math_extensibles_define_double}
+
+\def\math_extensibles_define_over[#1][#2][#3]%
{\ifthirdargument
- \setuevalue{#2}{\math_stackers_double\zerocount{#1}{\number#3}}%
+ \setuevalue{#2}{\math_stackers_make_double\plusone \zerocount{#1}{\number#3}{0}}%
\else
- \setuevalue{#1}{\math_stackers_double\zerocount\noexpand\currentmathstackers{\number#2}}%
+ \setuevalue{#1}{\math_stackers_make_double\plusone \zerocount\noexpand\currentmathstackers{\number#2}{0}}%
\fi}
-\def\math_extensiblies_define_under[#1][#2][#3]%
+\def\math_extensibles_define_under[#1][#2][#3]%
{\ifthirdargument
- \setuevalue{#2}{\math_stackers_double\plusone{#1}{\number#3}}%
+ \setuevalue{#2}{\math_stackers_make_double\zerocount\plusone{#1}{\number#3}{0}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double\zerocount\plusone\noexpand\currentmathstackers{\number#2}{0}}%
+ \fi}
+
+\def\math_extensibles_define_double[#1][#2][#3][#4]%
+ {\iffourthargument
+ \setuevalue{#2}{\math_stackers_make_double\plusone \plusone{#1}{\number#3}{\number#4}}%
\else
- \setuevalue{#1}{\math_stackers_double\plusone\noexpand\currentmathstackers{\number#2}}%
+ \setuevalue{#1}{\math_stackers_make_double\plusone \plusone\noexpand\currentmathstackers{\number#2}{\number#3}}%
\fi}
-\unexpanded\def\mathover {\begingroup\dosingleempty\math_stackers_handle_over }
-\unexpanded\def\mathunder{\begingroup\dosingleempty\math_stackers_handle_under}
+\unexpanded\def\mathover {\begingroup\dosingleempty\math_stackers_handle_over }
+\unexpanded\def\mathunder {\begingroup\dosingleempty\math_stackers_handle_under }
+\unexpanded\def\mathdouble{\begingroup\dodoubleempty\math_stackers_handle_double}
\def\math_stackers_handle_over[#category]%
- {\math_stackers_handle_double\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on
+ {\math_stackers_direct_double\plusone\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on
\def\math_stackers_handle_under[#category]#codepoint#bottomtext%
- {\math_stackers_handle_double\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+ {\math_stackers_direct_double\zerocount\plusone{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+
+\def\math_stackers_handle_double[#category]#codepoint#bottomtext%
+ {\math_stackers_direct_double\plusone\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
-\def\math_stackers_handle_double#location#category#codepoint#text%
- {\math_stackers_double#location{#category}{#codepoint}{#text}%
+\def\math_stackers_direct_double#top#bottom#category#codepoint#text%
+ {\math_stackers_make_double#top#bottom{#category}{#codepoint}{#text}%
\endgroup}
%D Here is a bonus macro that takes three texts. It can be used to get consistent
@@ -558,23 +638,79 @@
[\c!order=\v!reverse]
\definemathstackers
- [\v!top]
+ [\v!both]
[\v!mathematics]
- [\c!location=\v!top,
+ [\c!location=\v!top, % ?
\c!strut=\v!no,
\c!middlecommand=\mathematics,
\c!hoffset=\zeropoint]
+\definemathstackers
+ [\v!top]
+ [\v!both]
+
\definemathstackers
[\v!bottom]
- [\v!mathematics]
- [\c!location=\v!top,
- \c!strut=\v!no,
- \c!middlecommand=\mathematics,
- \c!hoffset=\zeropoint]
+ [\v!both]
+
+\definemathstackers
+ [vfenced]
+ [\v!both]
+ [\c!mathclass=\s!ord,
+ \c!mathlimits=\v!yes]
% These are compatibility definitions, math only.
+% todo: top= bottom= middle= is nicer (compare math-fen)
+
+%D We save a few definitions that we automatically got from the \type {char-def.lua}
+%D database.
+
+% Be careful in choosing what accents you take (the code below uses a
+% combining one):
+%
+% \startbuffer
+% % $\Umathaccent top 0 0 "20D7 {example}$
+% % $\Umathaccent top fixed 0 0 "20D7 {example}$
+% $\Umathaccent 0 0 "20D7 {example}$
+% $\Umathaccent fixed 0 0 "20D7 {example}$
+% $\Umathaccent bottom 0 0 "20D7 {example}$
+% $\Umathaccent bottom fixed 0 0 "20D7 {example}$
+% $\Umathaccent both 0 0 "20D7
+% 0 0 "20D7 {example}$
+% $\Umathaccent both fixed 0 0 "20D7
+% fixed 0 0 "20D7 {example}$
+% $\Umathaccent both 0 0 "20D7
+% fixed 0 0 "20D7 {example}$
+% $\Umathaccent both fixed 0 0 "20D7
+% 0 0 "20D7 {example}$
+% \stopbuffer
+%
+% \setupbodyfont[modern] \getbuffer
+% \setupbodyfont[xits] \getbuffer
+% \setupbodyfont[cambria] \getbuffer
+
+\unexpanded\def\normaldoublebrace {\Umathaccents 0 \defaultmathfamily "23DE 0 \defaultmathfamily "23DF }
+\unexpanded\def\normaldoubleparent{\Umathaccents 0 \defaultmathfamily "23DC 0 \defaultmathfamily "23DD }
+
+% let's keep this
+
+\let\normaloverbrace \overbrace
+\let\normalunderbrace \underbrace
+\let\normaloverparent \overparent
+\let\normalunderparent \underparent
+\let\normaloverbracket \overbracket
+\let\normalunderbracket \underbracket
+\let\normalunderleftarrow \underleftarrow
+\let\normaloverleftarrow \overleftarrow
+\let\normalunderrightarrow\underrightarrow
+\let\normaloverrightarrow \overrightarrow
+
+\let\lceil \lceiling
+\let\rceil \rceiling
+
+%D Here come the new ones:
+
\definemathstackers [\v!none] [\v!mathematics] [\c!hoffset=\zeropoint]
\definemathstackers [\v!normal] [\v!mathematics] [\c!hoffset=0.5\emwidth] % the default
\definemathstackers [\v!small] [\v!mathematics] [\c!hoffset=1\emwidth]
@@ -583,8 +719,12 @@
\definemathextensible [\v!reverse] [xrel] ["002D]
\definemathextensible [\v!reverse] [xequal] ["003D]
-\definemathextensible [\v!reverse] [xleftarrow] ["2190]
-\definemathextensible [\v!reverse] [xrightarrow] ["2192]
+\definemathextensible [\v!reverse] [xleftarrow] ["2190] % ["27F5]
+\definemathextensible [\v!reverse] [xrightarrow] ["2192] % ["27F6]
+\definemathextensible [\v!reverse] [xleftrightarrow] ["27F7]
+\definemathextensible [\v!reverse] [xLeftarrow] ["27F8]
+\definemathextensible [\v!reverse] [xRightarrow] ["27F9]
+\definemathextensible [\v!reverse] [xLeftrightarrow] ["27FA]
\definemathextensible [\v!reverse] [xtwoheadleftarrow] ["219E]
\definemathextensible [\v!reverse] [xtwoheadrightarrow] ["21A0]
\definemathextensible [\v!reverse] [xmapsto] ["21A6]
@@ -598,15 +738,15 @@
\definemathextensible [\v!reverse] [xleftrightharpoons] ["21CB]
\definemathextensible [\v!reverse] [xrightleftharpoons] ["21CC]
\definemathextensible [\v!reverse] [xtriplerel] ["2261]
-\definemathextensible [\v!reverse] [xleftrightarrow] ["27F7]
-\definemathextensible [\v!reverse] [xLeftarrow] ["27F8]
-\definemathextensible [\v!reverse] [xRightarrow] ["27F9]
-\definemathextensible [\v!reverse] [xLeftrightarrow] ["27FA]
\definemathextensible [\v!mathematics] [mrel] ["002D]
\definemathextensible [\v!mathematics] [mequal] ["003D]
-\definemathextensible [\v!mathematics] [mleftarrow] ["2190]
-\definemathextensible [\v!mathematics] [mrightarrow] ["2192]
+\definemathextensible [\v!mathematics] [mleftarrow] ["2190] % ["27F5]
+\definemathextensible [\v!mathematics] [mrightarrow] ["2192] % ["27F6]
+\definemathextensible [\v!mathematics] [mleftrightarrow] ["27F7]
+\definemathextensible [\v!mathematics] [mLeftarrow] ["27F8]
+\definemathextensible [\v!mathematics] [mRightarrow] ["27F9]
+\definemathextensible [\v!mathematics] [mLeftrightarrow] ["27FA]
\definemathextensible [\v!mathematics] [mtwoheadleftarrow] ["219E]
\definemathextensible [\v!mathematics] [mtwoheadrightarrow] ["21A0]
\definemathextensible [\v!mathematics] [mmapsto] ["21A6]
@@ -620,18 +760,18 @@
\definemathextensible [\v!mathematics] [mleftrightharpoons] ["21CB]
\definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC]
\definemathextensible [\v!mathematics] [mtriplerel] ["2261]
-\definemathextensible [\v!mathematics] [mleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mLeftarrow] ["27F8]
-\definemathextensible [\v!mathematics] [mRightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mLeftrightarrow] ["27FA]
\definemathextensible [\v!text] [trel] ["002D]
\definemathextensible [\v!text] [tequal] ["003D]
-\definemathextensible [\v!text] [tleftarrow] ["2190]
-\definemathextensible [\v!text] [trightarrow] ["2192]
+\definemathextensible [\v!text] [tmapsto] ["21A6]
+\definemathextensible [\v!text] [tleftarrow] ["2190] % ["27F5]
+\definemathextensible [\v!text] [trightarrow] ["2192] % ["27F6]
+\definemathextensible [\v!text] [tleftrightarrow] ["27F7]
+\definemathextensible [\v!text] [tLeftarrow] ["27F8]
+\definemathextensible [\v!text] [tRightarrow] ["27F9]
+\definemathextensible [\v!text] [tLeftrightarrow] ["27FA]
\definemathextensible [\v!text] [ttwoheadleftarrow] ["219E]
\definemathextensible [\v!text] [ttwoheadrightarrow] ["21A0]
-\definemathextensible [\v!text] [tmapsto] ["21A6]
\definemathextensible [\v!text] [thookleftarrow] ["21A9]
\definemathextensible [\v!text] [thookrightarrow] ["21AA]
\definemathextensible [\v!text] [tleftharpoondown] ["21BD]
@@ -642,30 +782,59 @@
\definemathextensible [\v!text] [tleftrightharpoons] ["21CB]
\definemathextensible [\v!text] [trightleftharpoons] ["21CC]
\definemathextensible [\v!text] [ttriplerel] ["2261]
-\definemathextensible [\v!text] [tleftrightarrow] ["27F7]
-\definemathextensible [\v!text] [tLeftarrow] ["27F8]
-\definemathextensible [\v!text] [tRightarrow] ["27F9]
-\definemathextensible [\v!text] [tLeftrightarrow] ["27FA]
-\definemathoverextensible [\v!top] [overleftarrow] ["2190]
-\definemathoverextensible [\v!top] [overrightarrow] ["2192]
+\definemathoverextensible [\v!top] [overleftarrow] ["2190] % ["27F5]
+\definemathoverextensible [\v!top] [overrightarrow] ["2192] % ["27F6]
+\definemathoverextensible [\v!top] [overleftrightarrow] ["27F7]
+\definemathoverextensible [\v!top] [overtwoheadleftarrow] ["27F8]
+\definemathoverextensible [\v!top] [overtwoheadrightarrow] ["27F9]
\definemathoverextensible [\v!top] [overleftharpoondown] ["21BD]
\definemathoverextensible [\v!top] [overleftharpoonup] ["21BC]
\definemathoverextensible [\v!top] [overrightharpoondown] ["21C1]
\definemathoverextensible [\v!top] [overrightharpoonup] ["21C0]
-\definemathoverextensible [\v!top] [overleftrightarrow] ["27F7]
-\definemathoverextensible [\v!top] [overtwoheadleftarrow] ["27F8]
-\definemathoverextensible [\v!top] [overtwoheadrightarrow] ["27F9]
-\definemathunderextensible [\v!bottom] [underleftarrow] ["2190]
-\definemathunderextensible [\v!bottom] [underrightarrow] ["2192]
+\definemathunderextensible [\v!bottom] [underleftarrow] ["2190] % ["27F5]
+\definemathunderextensible [\v!bottom] [underrightarrow] ["2192] % ["27F6]
+\definemathunderextensible [\v!bottom] [underleftrightarrow] ["27F7]
+\definemathunderextensible [\v!bottom] [undertwoheadleftarrow] ["27F8]
+\definemathunderextensible [\v!bottom] [undertwoheadrightarrow] ["27F9]
\definemathunderextensible [\v!bottom] [underleftharpoondown] ["21BD]
\definemathunderextensible [\v!bottom] [underleftharpoonup] ["21BC]
\definemathunderextensible [\v!bottom] [underrightharpoondown] ["21C1]
\definemathunderextensible [\v!bottom] [underrightharpoonup] ["21C0]
-\definemathunderextensible [\v!bottom] [underleftrightarrow] ["27F7]
-\definemathunderextensible [\v!bottom] [undertwoheadleftarrow] ["27F8]
-\definemathunderextensible [\v!bottom] [undertwoheadrightarrow] ["27F9]
+
+% We don't use overline and underline. This is one of the overlooked aspects of
+% unicode cq. opentype math: why treat rules different than e.g. arrows and
+% accents. It is a bit unfortunate that the opportunity to move math to new
+% technologies happened outside the tex domain (and/or some aspects were kept
+% while in fact they were side effects of limitations of traditional fonts).
+% From the unicode aware tex engines' implementation point of view things
+% could have been done a bit nicer but then: the community didn't seem to care
+% too much and just has to follow now.
+%
+% Anyhow, we use a character based approach so that at least we get unicode
+% stuff in the backend (okay, we still need to deal with some cut and paste
+% issues but at least we now know what we deal with).
+
+% alternatively we can move the original to FE*
+
+\definemathoverextensible [vfenced] [overbar] ["FE33E] % ["203E]
+\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
+\definemathdoubleextensible [vfenced] [doublebar] ["FE33E] ["FE33F]
+
+\definemathoverextensible [vfenced] [overbrace] ["FE3DE] % ["023DE]
+\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
+\definemathdoubleextensible [vfenced] [doublebrace] ["FE3DE] ["FE3DF]
+
+\definemathoverextensible [vfenced] [overparent] ["FE3DC] % ["023DC]
+\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
+\definemathdoubleextensible [vfenced] [doubleparent] ["FE3DC] ["FE3DD]
+
+\definemathoverextensible [vfenced] [overbracket] ["FE3B4] % ["023B4]
+\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
+\definemathdoubleextensible [vfenced] [doublebracket] ["FE3B4] ["FE3B5]
+
+% \unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
%D Some bonus ones (for the moment here):
@@ -696,7 +865,7 @@
#3%
\endgroup}}
-% These will be defined in char-def as well:
+% These will be defined in char-def as well once we have \leaders
\unexpanded\def\rightarrowfill {\math_stackers_hacked_fill \relbar \relbar \rightarrow}
\unexpanded\def\leftarrowfill {\math_stackers_hacked_fill \leftarrow \relbar \relbar }
@@ -734,6 +903,7 @@
{\expandafter\let\csname\??mathextensiblefallbacks\number#2\expandafter\endcsname\csname#1\endcsname
\expandafter\let\csname #1\expandafter\endcsname\csname#1\endcsname}
+\defineextensiblefiller [barfill] ["203E]
\defineextensiblefiller [relfill] ["002D]
\defineextensiblefiller [equalfill] ["003D]
\defineextensiblefiller [leftarrowfill] ["2190]
@@ -755,6 +925,7 @@
\defineextensiblefiller [Leftarrowfill] ["27F8]
\defineextensiblefiller [Rightarrowfill] ["27F9]
\defineextensiblefiller [Leftrightarrowfill] ["27FA]
%D Extra:
@@ -766,6 +937,40 @@
% \mathchardef\doublebond"003D
% \mathchardef\triplebond"2261
+%D Also handy:
+
+\unexpanded\def\definemathunstacked
+ {\dotripleempty\math_stackers_define_unstacked_normal}
+
+\def\math_stackers_define_unstacked_normal[#1][#2][#3]% category name unicode
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_unstacked_normal{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_unstacked_normal\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\unexpanded\def\math_stackers_unstacked_normal#category#codepoint%
+ {\begingroup
+ \edef\currentmathstackers{#category}%
+ \edef\p_moffset{\mathstackersparameter\c!moffset}%
+ \ifx\p_moffset\empty \else
+ \mskip\scratchmuskip
+ \fi
+ \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
+ {\usemathstackerscolorparameter\c!color
+ \Umathchar\zerocount\defaultmathfamily#codepoint}%
+ \ifx\p_moffset\empty \else
+ \mskip\scratchmuskip
+ \fi
+ \endgroup}
+
+\definemathstackers [\v!wide] [\c!moffset=\thickmuskip,\c!mathclass=\s!rel]
+
+\definemathunstacked [\v!wide] [And] ["0026] % \mathrel{\;&\;}
+\definemathunstacked [\v!wide] [impliedby] ["27F8] % \mathrel{\;\Longleftarrow\;}
+\definemathunstacked [\v!wide] [implies] ["27F9] % \mathrel{\;\Longrightarrow\;}
+\definemathunstacked [\v!wide] [iff] ["27FA] % \mathrel{\;\Longleftrightarrow\;}
+
\protect \endinput
% \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}
diff --git a/Master/texmf-dist/tex/context/base/math-tag.lua b/Master/texmf-dist/tex/context/base/math-tag.lua
index ab5902dd416..3cd4cae16eb 100644
--- a/Master/texmf-dist/tex/context/base/math-tag.lua
+++ b/Master/texmf-dist/tex/context/base/math-tag.lua
@@ -11,10 +11,22 @@ if not modules then modules = { } end modules ['math-tag'] = {
local find, match = string.find, string.match
local insert, remove = table.insert, table.remove
-local attributes, nodes = attributes, nodes
+local attributes = attributes
+local nodes = nodes
-local set_attributes = nodes.setattributes
-local traverse_nodes = node.traverse
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local set_attributes = nuts.setattributes
+local traverse_nodes = nuts.traverse
local nodecodes = nodes.nodecodes
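
Like the other files in this patch, math-tag.lua is moved to the nuts (direct
node) interface: a node is first converted with tonut and then read through
accessor functions instead of indexing fields on the userdata. The access
pattern, shown as a small sketch with a made-up helper:

    local nuts     = nodes.nuts
    local tonut    = nuts.tonut
    local getfield = nuts.getfield

    -- hypothetical helper: fetch the nucleus of a noad passed as a normal node
    local function nucleusof(n)
        local d = tonut(n)           -- direct reference
        return getfield(d,"nucleus") -- accessor instead of n.nucleus
    end
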
@@ -61,22 +73,24 @@ local function processsubsup(start)
-- At some point we might need to add an attribute signaling the
-- super- and subscripts because TeX and MathML use a different
-- order.
- local nucleus, sup, sub = start.nucleus, start.sup, start.sub
+ local nucleus = getfield(start,"nucleus")
+ local sup = getfield(start,"sup")
+ local sub = getfield(start,"sub")
if sub then
if sup then
- start[a_tagged] = start_tagged("msubsup")
+ setattr(start,a_tagged,start_tagged("msubsup"))
process(nucleus)
process(sub)
process(sup)
stop_tagged()
else
- start[a_tagged] = start_tagged("msub")
+ setattr(start,a_tagged,start_tagged("msub"))
process(nucleus)
process(sub)
stop_tagged()
end
elseif sup then
- start[a_tagged] = start_tagged("msup")
+ setattr(start,a_tagged,start_tagged("msup"))
process(nucleus)
process(sup)
stop_tagged()
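
The tag that processsubsup opens depends only on which script fields are
present, matching the MathML element names; reduced to that decision (names
are made up for illustration):

    local function subsuptag(sub,sup)
        if sub and sup then
            return "msubsup"
        elseif sub then
            return "msub"
        elseif sup then
            return "msup"
        end
    end
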
@@ -93,11 +107,11 @@ local actionstack = { }
process = function(start) -- we cannot use the processor as we have no finalizers (yet)
while start do
- local id = start.id
+ local id = getid(start)
if id == math_char_code then
- local char = start.char
+ local char = getchar(start)
-- check for code
- local a = start[a_mathcategory]
+ local a = getattr(start,a_mathcategory)
if a then
a = { detail = a }
end
@@ -119,22 +133,22 @@ process = function(start) -- we cannot use the processor as we have no finalizer
else
tag = "mo"
end
- start[a_tagged] = start_tagged(tag,a)
+ setattr(start,a_tagged,start_tagged(tag,a))
stop_tagged()
break -- okay?
elseif id == math_textchar_code then
-- check for code
- local a = start[a_mathcategory]
+ local a = getattr(start,a_mathcategory)
if a then
- start[a_tagged] = start_tagged("ms",{ detail = a })
+ setattr(start,a_tagged,start_tagged("ms",{ detail = a }))
else
- start[a_tagged] = start_tagged("ms")
+ setattr(start,a_tagged,start_tagged("ms"))
end
stop_tagged()
break
elseif id == math_delim_code then
-- check for code
- start[a_tagged] = start_tagged("mo")
+ setattr(start,a_tagged,start_tagged("mo"))
stop_tagged()
break
elseif id == math_style_code then
@@ -143,14 +157,14 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
elseif id == math_box_code or id == hlist_code or id == vlist_code then
-- keep an eye on math_box_code and see what ends up in there
- local attr = start[a_tagged]
+ local attr = getattr(start,a_tagged)
local last = attr and taglist[attr]
if last and find(last[#last],"formulacaption[:%-]") then
-- leave alone, will nicely move to the outer level
else
local text = start_tagged("mtext")
- start[a_tagged] = text
- local list = start.list
+ setattr(start,a_tagged,text)
+ local list = getfield(start,"list")
if not list then
-- empty list
elseif not attr then
@@ -166,8 +180,8 @@ process = function(start) -- we cannot use the processor as we have no finalizer
local function runner(list) -- quite inefficient
local cache = { } -- we can have nested unboxed mess so best local to runner
for n in traverse_nodes(list) do
- local id = n.id
- local aa = n[a_tagged]
+ local id = getid(n)
+ local aa = getattr(n,a_tagged)
if aa then
local ac = cache[aa]
if not ac then
@@ -185,12 +199,12 @@ process = function(start) -- we cannot use the processor as we have no finalizer
end
cache[aa] = ac
end
- n[a_tagged] = ac
+ setattr(n,a_tagged,ac)
else
- n[a_tagged] = text
+ setattr(n,a_tagged,text)
end
if id == hlist_code or id == vlist_code then
- runner(n.list)
+ runner(getlist(n))
end
end
end
@@ -199,47 +213,53 @@ process = function(start) -- we cannot use the processor as we have no finalizer
stop_tagged()
end
elseif id == math_sub_code then
- local list = start.list
+ local list = getfield(start,"list")
if list then
- local attr = start[a_tagged]
+ local attr = getattr(start,a_tagged)
local last = attr and taglist[attr]
local action = last and match(last[#last],"maction:(.-)%-")
if action and action ~= "" then
if actionstack[#actionstack] == action then
- start[a_tagged] = start_tagged("mrow")
+ setattr(start,a_tagged,start_tagged("mrow"))
process(list)
stop_tagged()
else
insert(actionstack,action)
- start[a_tagged] = start_tagged("mrow",{ detail = action })
+ setattr(start,a_tagged,start_tagged("mrow",{ detail = action }))
process(list)
stop_tagged()
remove(actionstack)
end
else
- start[a_tagged] = start_tagged("mrow")
+ setattr(start,a_tagged,start_tagged("mrow"))
process(list)
stop_tagged()
end
end
elseif id == math_fraction_code then
- local num, denom, left, right = start.num, start.denom, start.left, start.right
+ local num = getfield(start,"num")
+ local denom = getfield(start,"denom")
+ local left = getfield(start,"left")
+ local right = getfield(start,"right")
if left then
- left[a_tagged] = start_tagged("mo")
+ setattr(left,a_tagged,start_tagged("mo"))
process(left)
stop_tagged()
end
- start[a_tagged] = start_tagged("mfrac")
+ setattr(start,a_tagged,start_tagged("mfrac"))
process(num)
process(denom)
stop_tagged()
if right then
- right[a_tagged] = start_tagged("mo")
+ setattr(right,a_tagged,start_tagged("mo"))
process(right)
stop_tagged()
end
elseif id == math_choice_code then
- local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript
+ local display = getfield(start,"display")
+ local text = getfield(start,"text")
+ local script = getfield(start,"script")
+ local scriptscript = getfield(start,"scriptscript")
if display then
process(display)
end
@@ -253,67 +273,69 @@ process = function(start) -- we cannot use the processor as we have no finalizer
process(scriptscript)
end
elseif id == math_fence_code then
- local delim = start.delim
- local subtype = start.subtype
+ local delim = getfield(start,"delim")
+ local subtype = getfield(start,"subtype")
+ -- setattr(start,a_tagged,start_tagged("mfenced")) -- needs checking
if subtype == 1 then
-- left
- start[a_tagged] = start_tagged("mfenced")
if delim then
- start[a_tagged] = start_tagged("mleft")
+ setattr(start,a_tagged,start_tagged("mleft"))
process(delim)
stop_tagged()
end
elseif subtype == 2 then
-- middle
if delim then
- start[a_tagged] = start_tagged("mmiddle")
+ setattr(start,a_tagged,start_tagged("mmiddle"))
process(delim)
stop_tagged()
end
elseif subtype == 3 then
if delim then
- start[a_tagged] = start_tagged("mright")
+ setattr(start,a_tagged,start_tagged("mright"))
process(delim)
stop_tagged()
end
- stop_tagged()
else
-- can't happen
end
+ -- stop_tagged()
elseif id == math_radical_code then
- local left, degree = start.left, start.degree
+ local left = getfield(start,"left")
+ local degree = getfield(start,"degree")
if left then
start_tagged("")
process(left) -- root symbol, ignored
stop_tagged()
end
if degree then -- not good enough, can be empty mlist
- start[a_tagged] = start_tagged("mroot")
+ setattr(start,a_tagged,start_tagged("mroot"))
processsubsup(start)
process(degree)
stop_tagged()
else
- start[a_tagged] = start_tagged("msqrt")
+ setattr(start,a_tagged,start_tagged("msqrt"))
processsubsup(start)
stop_tagged()
end
elseif id == math_accent_code then
- local accent, bot_accent = start.accent, start.bot_accent
+ local accent = getfield(start,"accent")
+ local bot_accent = getfield(start,"bot_accent")
if bot_accent then
if accent then
- start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
+ setattr(start,a_tagged,start_tagged("munderover",{ detail = "accent" }))
processsubsup(start)
process(bot_accent)
process(accent)
stop_tagged()
else
- start[a_tagged] = start_tagged("munder",{ detail = "accent" })
+ setattr(start,a_tagged,start_tagged("munder",{ detail = "accent" }))
processsubsup(start)
process(bot_accent)
stop_tagged()
end
elseif accent then
- start[a_tagged] = start_tagged("mover",{ detail = "accent" })
+ setattr(start,a_tagged,start_tagged("mover",{ detail = "accent" }))
processsubsup(start)
process(accent)
stop_tagged()
@@ -321,22 +343,23 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
end
elseif id == glue_code then
- start[a_tagged] = start_tagged("mspace")
+ setattr(start,a_tagged,start_tagged("mspace"))
stop_tagged()
else
- start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
+ setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[i] }))
stop_tagged()
end
- start = start.next
+ start = getnext(start)
end
end
function noads.handlers.tags(head,style,penalties)
+ head = tonut(head)
local v_math = start_tagged("math")
local v_mrow = start_tagged("mrow")
- local v_mode = head[a_mathmode]
- head[a_tagged] = v_math
- head[a_tagged] = v_mrow
+ local v_mode = getattr(head,a_mathmode)
+ -- setattr(head,a_tagged,v_math)
+ setattr(head,a_tagged,v_mrow)
tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
process(head)
stop_tagged()
diff --git a/Master/texmf-dist/tex/context/base/math-vfu.lua b/Master/texmf-dist/tex/context/base/math-vfu.lua
index 2f7c0507ba9..6d9a9f903bc 100644
--- a/Master/texmf-dist/tex/context/base/math-vfu.lua
+++ b/Master/texmf-dist/tex/context/base/math-vfu.lua
@@ -26,6 +26,7 @@ local type, next = type, next
local max = math.max
local format = string.format
local utfchar = utf.char
+local fastcopy = table.copy
local fonts, nodes, mathematics = fonts, nodes, mathematics
@@ -38,6 +39,7 @@ local report_virtual = logs.reporter("fonts","virtual math")
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
local mathencodings = allocate()
fonts.encodings.math = mathencodings -- better is then: fonts.encodings.vectors
@@ -199,21 +201,44 @@ end
-- { "node", nodeinjections.transform(.7,0,0,.7) },
-- commands[#commands+1] = { "node", nodeinjections.restore() }
-local done = { }
+-- local done = { }
+--
+-- local function raise(main,characters,id,size,unicode,private,n,id_of_smaller) -- this is a real fake mess
+-- local raised = characters[private]
+-- if raised then
+-- if not done[unicode] then
+-- report_virtual("temporary too large %U due to issues in luatex backend",unicode)
+-- done[unicode] = true
+-- end
+-- local up = 0.85 * main.parameters.x_height
+-- local slot = { "slot", id, private }
+-- local commands = {
+-- push,
+-- { "down", - up },
+-- -- { "scale", .7, 0, 0, .7 },
+-- slot,
+-- }
+-- for i=2,n do
+-- commands[#commands+1] = slot
+-- end
+-- commands[#commands+1] = pop
+-- characters[unicode] = {
+-- width = .7 * n * raised.width,
+-- height = .7 * (raised.height + up),
+-- depth = .7 * (raised.depth - up),
+-- commands = commands,
+-- }
+-- end
+-- end
-local function raise(main,characters,id,size,unicode,private,n) -- this is a real fake mess
- local raised = characters[private]
+local function raise(main,characters,id,size,unicode,private,n,id_of_smaller) -- this is a real fake mess
+ local raised = fonts.hashes.characters[main.fonts[id_of_smaller].id][private] -- characters[private]
if raised then
- if not done[unicode] then
- report_virtual("temporary too large %U due to issues in luatex backend",unicode)
- done[unicode] = true
- end
local up = 0.85 * main.parameters.x_height
- local slot = { "slot", id, private }
+ local slot = { "slot", id_of_smaller, private }
local commands = {
push,
{ "down", - up },
- -- { "scale", .7, 0, 0, .7 },
slot,
}
for i=2,n do
@@ -221,9 +246,10 @@ local function raise(main,characters,id,size,unicode,private,n) -- this is a rea
end
commands[#commands+1] = pop
characters[unicode] = {
- width = .7 * n * raised.width,
- height = .7 * (raised.height + up),
- depth = .7 * (raised.depth - up),
+ width = n * raised.width,
+ height = (raised.height or 0) + up,
+ depth = (raised.depth or 0) - up,
+ italic = raised.italic,
commands = commands,
}
end
@@ -406,7 +432,25 @@ local function repeated(main,characters,id,size,unicode,u,n,private,fraction) --
end
end
+-- we use the fact that context defines the smallest sizes first .. a real dirty and ugly hack
+
+local data_of_smaller = nil
+local size_of_smaller = 0
+
function vfmath.addmissing(main,id,size)
+
+ local id_of_smaller = nil
+
+ if size < size_of_smaller or size_of_smaller == 0 then
+ data_of_smaller = main.fonts[id]
+ id_of_smaller = id
+ else
+ id_of_smaller = #main.fonts + 1
+ main.fonts[id_of_smaller] = data_of_smaller
+ end
+
+ -- here id is the index in fonts (normally 14 or so) and that slot points to self
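+ -- In other words (as far as the code tells us): the first, smallest instance caches
+ -- its own fonts entry; every later (larger) instance appends the previously cached
+ -- smaller entry to main.fonts and passes that slot as id_of_smaller to raise() below,
+ -- so the fake primes are built from genuinely smaller glyphs instead of scaled ones.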
+
local characters = main.characters
local shared = main.shared
local variables = main.goodies.mathematics and main.goodies.mathematics.variables or { }
@@ -504,9 +548,11 @@ function vfmath.addmissing(main,id,size)
repeated(main,characters,id,size,0x222C,0x222B,2,0xFF800,1/3)
repeated(main,characters,id,size,0x222D,0x222B,3,0xFF810,1/3)
- -- raise (main,characters,id,size,0x02032,0xFE325,1) -- prime
- -- raise (main,characters,id,size,0x02033,0xFE325,2) -- double prime
- -- raise (main,characters,id,size,0x02034,0xFE325,3) -- triple prime
+ characters[0xFE325] = fastcopy(characters[0x2032])
+
+ raise (main,characters,id,size,0x02032,0xFE325,1,id_of_smaller) -- prime
+ raise (main,characters,id,size,0x02033,0xFE325,2,id_of_smaller) -- double prime
+ raise (main,characters,id,size,0x02034,0xFE325,3,id_of_smaller) -- triple prime
-- there are more (needs discussion first):
@@ -515,6 +561,9 @@ function vfmath.addmissing(main,id,size)
characters[0x02B9] = characters[0x2032] -- we're nice
+ data_of_smaller = main.fonts[id]
+ size_of_smaller = size
+
end
local unique = 0 -- testcase: \startTEXpage \math{!\text{-}\text{-}\text{-}} \stopTEXpage
@@ -534,6 +583,82 @@ setmetatableindex(reverse, function(t,name)
return r
end)
+local function copy_glyph(main,target,original,unicode,slot)
+ local addprivate = fonts.helpers.addprivate
+ local olddata = original[unicode]
+ if olddata then
+ local newdata = {
+ width = olddata.width,
+ height = olddata.height,
+ depth = olddata.depth,
+ italic = olddata.italic,
+ kerns = olddata.kerns,
+ commands = { { "slot", slot, unicode } },
+ }
+ local glyphdata = newdata
+ local nextglyph = olddata.next
+ while nextglyph do
+ local oldnextdata = original[nextglyph]
+ local newnextdata = {
+ commands = { { "slot", slot, nextglyph } },
+ width = oldnextdata.width,
+ height = oldnextdata.height,
+ depth = oldnextdata.depth,
+ }
+ local newnextglyph = addprivate(main,formatters["M-N-%H"](nextglyph),newnextdata)
+ newdata.next = newnextglyph
+-- report_virtual("copied next: %X",newdata.next)
+ local nextnextglyph = oldnextdata.next
+ if nextnextglyph == nextglyph then
+ break
+ else
+ olddata = oldnextdata
+ newdata = newnextdata
+ nextglyph = nextnextglyph
+ end
+ end
+ local hv = olddata.horiz_variants
+ if hv then
+ hv = fastcopy(hv)
+ newdata.horiz_variants = hv
+ for i=1,#hv do
+ local hvi = hv[i]
+ local oldglyph = hvi.glyph
+ local olddata = original[oldglyph]
+ local newdata = {
+ commands = { { "slot", slot, oldglyph } },
+ width = olddata.width,
+ height = olddata.height,
+ depth = olddata.depth,
+ }
+ hvi.glyph = addprivate(main,formatters["M-H-%H"](oldglyph),newdata)
+-- report_virtual("copied h variant: %X at index %i",hvi.glyph,i)
+ end
+ end
+ local vv = olddata.vert_variants
+ if vv then
+ vv = fastcopy(vv)
+ newdata.vert_variants = vv
+ for i=1,#vv do
+ local vvi = vv[i]
+ local oldglyph = vvi.glyph
+ local olddata = original[oldglyph]
+ local newdata = {
+ commands = { { "slot", slot, oldglyph } },
+ width = olddata.width,
+ height = olddata.height,
+ depth = olddata.depth,
+ }
+ vvi.glyph = addprivate(main,formatters["M-V-%H"](oldglyph),newdata)
+-- report_virtual("copied v variant: %X at index %i",vvi.glyph,i)
+ end
+ end
+ return newdata
+ end
+end
+
+vfmath.copy_glyph = copy_glyph
+
function vfmath.define(specification,set,goodies)
local name = specification.name -- symbolic name
local size = specification.size -- given size
@@ -576,7 +701,7 @@ function vfmath.define(specification,set,goodies)
shared[n] = { }
end
if trace_virtual then
- report_virtual("loading font %a subfont %s with name %a at %p as id %s using encoding %p",name,s,ssname,size,id,ss.vector)
+ report_virtual("loading font %a subfont %s with name %a at %p as id %s using encoding %a",name,s,ssname,size,id,ss.vector)
end
if not ss.checked then
ss.checked = true
@@ -677,6 +802,7 @@ function vfmath.define(specification,set,goodies)
parameters.x_height = parameters.x_height or 0
--
local already_reported = false
+ local parameters_done = false
for s=1,n do
local ss, fs = okset[s], loaded[s]
if not fs then
@@ -685,7 +811,13 @@ function vfmath.define(specification,set,goodies)
-- skip, redundant
else
local newparameters = fs.parameters
- if not newparameters then
+ local newmathparameters = fs.mathparameters
+ if newmathparameters then
+ if not parameters_done or ss.parameters then
+ mathparameters = newmathparameters
+ parameters_done = true
+ end
+ elseif not newparameters then
report_virtual("no parameters set in font %a",name)
elseif ss.extension then
mathparameters.math_x_height = newparameters.x_height or 0 -- math_x_height : height of x
@@ -716,187 +848,202 @@ function vfmath.define(specification,set,goodies)
mathparameters.axis_height = newparameters[22] or 0 -- axis_height : height of fraction lines above the baseline
-- report_virtual("loading and virtualizing font %a at size %p, setting sy parameters",name,size)
end
- local vectorname = ss.vector
- if vectorname then
- local offset = 0xFF000
- local vector = mathencodings[vectorname]
- local rotcev = reverse[vectorname]
- local isextension = ss.extension
- if vector and rotcev then
- local fc, fd, si = fs.characters, fs.descriptions, shared[s]
- local skewchar = ss.skewchar
- for unicode, index in next, vector do
- local fci = fc[index]
- if not fci then
- local fontname = fs.properties.name or "unknown"
- local rf = reported[fontname]
- if not rf then rf = { } reported[fontname] = rf end
- local rv = rf[vectorname]
- if not rv then rv = { } rf[vectorname] = rv end
- local ru = rv[unicode]
- if not ru then
- if trace_virtual then
- report_virtual("unicode slot %U has no index %H in vector %a for font %a",unicode,index,vectorname,fontname)
- elseif not already_reported then
- report_virtual("the mapping is incomplete for %a at %p",name,size)
- already_reported = true
- end
- rv[unicode] = true
- end
- else
- local ref = si[index]
- if not ref then
- ref = { { 'slot', s, index } }
- si[index] = ref
- end
- local kerns = fci.kerns
- local width = fci.width
- local italic = fci.italic
- if italic and italic > 0 then
- -- int_a^b
- if isextension then
- width = width + italic -- for obscure reasons the integral as a width + italic correction
- end
- end
- if kerns then
- local krn = { }
- for k, v in next, kerns do -- kerns is sparse
- local rk = rotcev[k]
- if rk then
- krn[rk] = v -- kerns[k]
- end
- end
- if not next(krn) then
- krn = nil
- end
- local t = {
- width = width,
- height = fci.height,
- depth = fci.depth,
- italic = italic,
- kerns = krn,
- commands = ref,
- }
- if skewchar then
- local k = kerns[skewchar]
- if k then
- t.top_accent = width/2 + k
+ if ss.overlay then
+ local fc = fs.characters
+ local first = ss.first
+ if first then
+ local last = ss.last or first
+ for unicode = first, last do
+ characters[unicode] = copy_glyph(main,characters,fc,unicode,s)
+ end
+ else
+ for unicode, data in next, fc do
+ characters[unicode] = copy_glyph(main,characters,fc,unicode,s)
+ end
+ end
+ else
+ local vectorname = ss.vector
+ if vectorname then
+ local offset = 0xFF000
+ local vector = mathencodings[vectorname]
+ local rotcev = reverse[vectorname]
+ local isextension = ss.extension
+ if vector and rotcev then
+ local fc, fd, si = fs.characters, fs.descriptions, shared[s]
+ local skewchar = ss.skewchar
+ for unicode, index in next, vector do
+ local fci = fc[index]
+ if not fci then
+ local fontname = fs.properties.name or "unknown"
+ local rf = reported[fontname]
+ if not rf then rf = { } reported[fontname] = rf end
+ local rv = rf[vectorname]
+ if not rv then rv = { } rf[vectorname] = rv end
+ local ru = rv[unicode]
+ if not ru then
+ if trace_virtual then
+ report_virtual("unicode slot %U has no index %H in vector %a for font %a",unicode,index,vectorname,fontname)
+ elseif not already_reported then
+ report_virtual("the mapping is incomplete for %a at %p",name,size)
+ already_reported = true
end
+ rv[unicode] = true
end
- characters[unicode] = t
else
- characters[unicode] = {
- width = width,
- height = fci.height,
- depth = fci.depth,
- italic = italic,
- commands = ref,
- }
- end
- end
- end
- if isextension then
- -- todo: if multiple ex, then 256 offsets per instance
- local extension = mathencodings["large-to-small"]
- local variants_done = fs.variants_done
- for index, fci in next, fc do -- the raw ex file
- if type(index) == "number" then
local ref = si[index]
if not ref then
ref = { { 'slot', s, index } }
si[index] = ref
end
+ local kerns = fci.kerns
+ local width = fci.width
local italic = fci.italic
- local t = {
- width = fci.width,
- height = fci.height,
- depth = fci.depth,
- italic = italic,
- commands = ref,
- }
- local n = fci.next
- if n then
- t.next = offset + n
- elseif variants_done then
- local vv = fci.vert_variants
- if vv then
- t.vert_variants = vv
- end
- local hv = fci.horiz_variants
- if hv then
- t.horiz_variants = hv
+ if italic and italic > 0 then
+ -- int_a^b
+ if isextension then
+ width = width + italic -- for obscure reasons the integral as a width + italic correction
end
- else
- local vv = fci.vert_variants
- if vv then
- for i=1,#vv do
- local vvi = vv[i]
- vvi.glyph = vvi.glyph + offset
+ end
+ if kerns then
+ local krn = { }
+ for k, v in next, kerns do -- kerns is sparse
+ local rk = rotcev[k]
+ if rk then
+ krn[rk] = v -- kerns[k]
end
- t.vert_variants = vv
end
- local hv = fci.horiz_variants
- if hv then
- for i=1,#hv do
- local hvi = hv[i]
- hvi.glyph = hvi.glyph + offset
+ if not next(krn) then
+ krn = nil
+ end
+ local t = {
+ width = width,
+ height = fci.height,
+ depth = fci.depth,
+ italic = italic,
+ kerns = krn,
+ commands = ref,
+ }
+ if skewchar then
+ local k = kerns[skewchar]
+ if k then
+ t.top_accent = width/2 + k
end
- t.horiz_variants = hv
end
+ characters[unicode] = t
+ else
+ characters[unicode] = {
+ width = width,
+ height = fci.height,
+ depth = fci.depth,
+ italic = italic,
+ commands = ref,
+ }
end
- characters[offset + index] = t
end
end
- fs.variants_done = true
- for unicode, index in next, extension do
- local cu = characters[unicode]
- if cu then
- cu.next = offset + index
- else
- local fci = fc[index]
- if not fci then
- -- do nothing
- else
- -- probably never entered
+ if isextension then
+ -- todo: if multiple ex, then 256 offsets per instance
+ local extension = mathencodings["large-to-small"]
+ local variants_done = fs.variants_done
+ for index, fci in next, fc do -- the raw ex file
+ if type(index) == "number" then
local ref = si[index]
if not ref then
ref = { { 'slot', s, index } }
si[index] = ref
end
- local kerns = fci.kerns
- if kerns then
- local krn = { }
- -- for k=1,#kerns do
- -- krn[offset + k] = kerns[k]
- -- end
- for k, v in next, kerns do -- is kerns sparse?
- krn[offset + k] = v
+ local italic = fci.italic
+ local t = {
+ width = fci.width,
+ height = fci.height,
+ depth = fci.depth,
+ italic = italic,
+ commands = ref,
+ }
+ local n = fci.next
+ if n then
+ t.next = offset + n
+ elseif variants_done then
+ local vv = fci.vert_variants
+ if vv then
+ t.vert_variants = vv
+ end
+ local hv = fci.horiz_variants
+ if hv then
+ t.horiz_variants = hv
end
- characters[unicode] = {
- width = fci.width,
- height = fci.height,
- depth = fci.depth,
- italic = fci.italic,
- commands = ref,
- kerns = krn,
- next = offset + index,
- }
else
- characters[unicode] = {
- width = fci.width,
- height = fci.height,
- depth = fci.depth,
- italic = fci.italic,
- commands = ref,
- next = offset + index,
- }
+ local vv = fci.vert_variants
+ if vv then
+ for i=1,#vv do
+ local vvi = vv[i]
+ vvi.glyph = vvi.glyph + offset
+ end
+ t.vert_variants = vv
+ end
+ local hv = fci.horiz_variants
+ if hv then
+ for i=1,#hv do
+ local hvi = hv[i]
+ hvi.glyph = hvi.glyph + offset
+ end
+ t.horiz_variants = hv
+ end
+ end
+ characters[offset + index] = t
+ end
+ end
+ fs.variants_done = true
+ for unicode, index in next, extension do
+ local cu = characters[unicode]
+ if cu then
+ cu.next = offset + index
+ else
+ local fci = fc[index]
+ if not fci then
+ -- do nothing
+ else
+ -- probably never entered
+ local ref = si[index]
+ if not ref then
+ ref = { { 'slot', s, index } }
+ si[index] = ref
+ end
+ local kerns = fci.kerns
+ if kerns then
+ local krn = { }
+ -- for k=1,#kerns do
+ -- krn[offset + k] = kerns[k]
+ -- end
+ for k, v in next, kerns do -- is kerns sparse?
+ krn[offset + k] = v
+ end
+ characters[unicode] = {
+ width = fci.width,
+ height = fci.height,
+ depth = fci.depth,
+ italic = fci.italic,
+ commands = ref,
+ kerns = krn,
+ next = offset + index,
+ }
+ else
+ characters[unicode] = {
+ width = fci.width,
+ height = fci.height,
+ depth = fci.depth,
+ italic = fci.italic,
+ commands = ref,
+ next = offset + index,
+ }
+ end
end
end
end
end
+ else
+ report_virtual("error in loading %a, problematic vector %a",name,vectorname)
end
- else
- report_virtual("error in loading %a, problematic vector %a",name,vectorname)
end
end
mathematics.extras.copy(main) --not needed here (yet)
diff --git a/Master/texmf-dist/tex/context/base/meta-fig.mkiv b/Master/texmf-dist/tex/context/base/meta-fig.mkiv
index 7fbc33be9e0..46dc4cffc81 100644
--- a/Master/texmf-dist/tex/context/base/meta-fig.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-fig.mkiv
@@ -54,7 +54,7 @@
\unexpanded\def\MPfigure#1#2% test for dup figure, can be replaced by a textext
{\bgroup
- \getfiguredimensionsonly[#1]% [\c!object=\v!no] already set
+ \getfiguredimensions[#1]% [\c!object=\v!no] already set
\startMPcode
externalfigure "#1"
xscaled \the\dimexpr\figurewidth \relax\space % must be points
diff --git a/Master/texmf-dist/tex/context/base/meta-fnt.lua b/Master/texmf-dist/tex/context/base/meta-fnt.lua
new file mode 100644
index 00000000000..596d0f45669
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/meta-fnt.lua
@@ -0,0 +1,269 @@
+if not modules then modules = { } end modules ['meta-fnt'] = {
+ version = 1.001,
+ comment = "companion to meta-fnt.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local concat = table.concat
+local format = string.format
+local formatters = string.formatters
+local chardata = characters.data
+local fontdata = fonts.hashes.identifiers
+
+local vffonts = fonts.handlers.vf
+
+local mpfonts = fonts.mp or { }
+fonts.mp = mpfonts
+
+mpfonts.version = mpfonts.version or 1.20
+mpfonts.inline = true
+mpfonts.cache = containers.define("fonts", "mp", mpfonts.version, true)
+
+metapost.fonts = metapost.fonts or { }
+
+-- a few shared locals (used like globals by the flusher callbacks below)
+
+local characters, descriptions = { }, { }
+local factor, code, slot, width, height, depth, total, variants, bbox, llx, lly, urx, ury = 100, { }, 0, 0, 0, 0, 0, 0, true, 0, 0, 0, 0
+
+-- The next variant of ActualText is what Taco and I could come up with
+-- eventually. As of September 2013 Acrobat copies okay, Sumatra copies a
+-- question mark, pdftotext injects an extra space and Okular adds a
+-- newline plus space.
+
+-- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [] TJ % t EMC ET"](code)
+
+local function topdf(n,code)
+ if n < 0x10000 then
+ return formatters["BT /Span << /ActualText >> BDC [] TJ % t EMC ET"](n,code)
+ else
+ return formatters["BT /Span << /ActualText >> BDC [] TJ % t EMC ET"](n/1024+0xD800,n%1024+0xDC00,code)
+ end
+end
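+
+-- The ActualText payload is a UTF-16BE hex string (hence the leading feff byte
+-- order mark), so characters outside the BMP have to be written as a surrogate
+-- pair. For reference, a textbook split looks like the sketch below; it is only
+-- illustrative and not part of this file:
+--
+-- local function surrogatepair(n) -- n >= 0x10000
+--     local m = n - 0x10000
+--     return 0xD800 + math.floor(m/1024), 0xDC00 + m % 1024
+-- end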
+
+-- local function topdf(n,code)
+-- return formatters["/Span << /ActualText (CTX) >> BDC % t EMC"](code)
+-- end
+
+local flusher = {
+ startfigure = function(_chr_,_llx_,_lly_,_urx_,_ury_)
+ code = { }
+ slot = _chr_
+ llx = _llx_
+ lly = _lly_
+ urx = _urx_
+ ury = _ury_
+ width = urx - llx
+ height = ury
+ depth = -lly
+ total = total + 1
+ inline = mpfonts.inline
+ end,
+ flushfigure = function(t)
+ for i=1,#t do
+ code[#code+1] = t[i]
+ end
+ end,
+ stopfigure = function()
+ local cd = chardata[n]
+ descriptions[slot] = {
+ -- unicode = slot,
+ name = cd and cd.adobename,
+ width = width * 100,
+ height = height * 100,
+ depth = depth * 100,
+ boundingbox = { llx, lly, urx, ury },
+ }
+ if inline then
+ characters[slot] = {
+ commands = {
+ { "special", "pdf:" .. topdf(slot,code) },
+ }
+ }
+ else
+ characters[slot] = {
+ commands = {
+ {
+ "image",
+ {
+ stream = topdf(slot,code),
+ bbox = { 0, -depth * 65536, width * 65536, height * 65536 }
+ },
+ },
+ }
+ }
+ end
+ end
+}
+
+local function process(mpxformat,name,instances,scalefactor)
+ local filename = resolvers.findfile(name)
+ local attributes = filename and lfs.isfile(filename) and lfs.attributes(filename)
+ if attributes then
+ statistics.starttiming(metapost.fonts)
+ scalefactor = scalefactor or 1
+ instances = instances or metapost.fonts.instances or 1 -- maybe store in lists too
+ local fontname = file.removesuffix(file.basename(name))
+ local modification = attributes.modification
+ local filesize = attributes.size
+ local hash = file.robustname(formatters["%s %05i %03i"](fontname,scalefactor*1000,instances))
+ local lists = containers.read(mpfonts.cache,hash)
+ if not lists or lists.modification ~= modification or lists.filesize ~= filesize or lists.instances ~= instances or lists.scalefactor ~= scalefactor then
+ statistics.starttiming(flusher)
+ local data = io.loaddata(filename)
+ metapost.reset(mpxformat)
+ metapost.setoutercolor(2) -- no outer color and no reset either
+ lists = { }
+ for i=1,instances do
+ characters = { }
+ descriptions = { }
+ metapost.process(
+ mpxformat,
+ {
+ formatters["randomseed := %s ;"](i*10),
+ formatters["charscale := %s ;"](scalefactor),
+ data,
+ },
+ false,
+ flusher,
+ false,
+ false,
+ "all"
+ )
+ lists[i] = {
+ characters = characters,
+ descriptions = descriptions,
+ parameters = {
+ designsize = 655360,
+ slant = 0,
+ space = 333 * scalefactor,
+ space_stretch = 166.5 * scalefactor,
+ space_shrink = 111 * scalefactor,
+ x_height = 431 * scalefactor,
+ quad = 1000 * scalefactor,
+ extra_space = 0,
+ },
+ properties = {
+ name = formatters["%s-%03i"](hash,i),
+ virtualized = true,
+ spacer = "space",
+ }
+ }
+ end
+ lists.version = metapost.variables.fontversion or "1.000"
+ lists.modification = modification
+ lists.filesize = filesize
+ lists.instances = instances
+ lists.scalefactor = scalefactor
+ metapost.reset(mpxformat) -- saves memory
+ lists = containers.write(mpfonts.cache, hash, lists)
+ statistics.stoptiming(flusher)
+ end
+ variants = variants + #lists
+ statistics.stoptiming(metapost.fonts)
+ return lists
+ else
+ return { }
+ end
+end
+
+metapost.fonts.flusher = flusher
+metapost.fonts.instances = 1
+metapost.fonts.process = process
+
+local function build(g,v)
+ local size = g.specification.size
+ local data = process(v[2],v[3],v[4],size/655360,v[6])
+ local list = { }
+ local t = { }
+ for d=1,#data do
+ t = fonts.constructors.scale(data[d],-1000)
+ local id = font.nextid()
+ t.fonts = { { id = id } }
+ fontdata[id] = t
+ if v[5] then
+ vffonts.helpers.composecharacters(t)
+ end
+ list[d] = font.define(t)
+ end
+ for k, v in next, t do -- last t
+ g[k] = v -- kind of replace, when not present, make nil
+ end
+ g.properties.virtualized = true
+ g.variants = list
+end
+
+vffonts.combiner.commands.metapost = build
+vffonts.combiner.commands.metafont = build
+
+statistics.register("metapost font generation", function()
+ if total > 0 then
+ local time = statistics.elapsedtime(flusher)
+ if total > 0 then
+ return format("%i glyphs, %.3f seconds runtime, %i glyphs/second", total, time, total/time)
+ else
+ return format("%i glyphs, %.3f seconds runtime", total, time)
+ end
+ end
+end)
+
+statistics.register("metapost font loading",function()
+ if variants > 0 then
+ local time = statistics.elapsedtime(metapost.fonts)
+ if variants > 0 then
+ return format("%.3f seconds, %i instances, %0.3f instances/second", time, variants, variants/time)
+ else
+ return format("%.3f seconds, %i instances", time, variants)
+ end
+ end
+end)
+
+-- fonts.definers.methods.install( "bidi", {
+-- {
+-- "metapost", -- method
+-- "metafun", -- format
+-- "fontoeps.mp", -- filename
+-- 1, -- instances
+-- false, -- compose
+-- },
+-- } )
+
+local report = logs.reporter("metapost","fonts")
+
+function metapost.fonts.define(specification)
+ local fontname = specification.fontname or ""
+ local filename = specification.filename or ""
+ local format = specification.format or "metafun"
+ if fontname == "" then
+ report("no fontname given")
+ return
+ end
+ if filename == "" then
+ report("no filename given for %a",fontname)
+ return
+ end
+ local fullname = resolvers.findfile(filename)
+ if fullname == "" then
+ report("unable to locate file %a",filename)
+ return
+ end
+ report("generating font %a using format %a and file %a",fontname,format,filename)
+ fonts.definers.methods.install(fontname, {
+ {
+ specification.engine or "metapost",
+ format,
+ filename,
+ specification.instances or 1,
+ specification.compose or false,
+ },
+ } )
+end
+
+commands.definemetafont = metapost.fonts.define
+
+-- metapost.fonts.define {
+-- fontname = "bidi",
+-- filename = "bidi-symbols.mp",
+-- }
diff --git a/Master/texmf-dist/tex/context/base/meta-fnt.mkiv b/Master/texmf-dist/tex/context/base/meta-fnt.mkiv
new file mode 100644
index 00000000000..603fcf14d51
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/meta-fnt.mkiv
@@ -0,0 +1,36 @@
+%D \module
+%D [ file=meta-fnt,
+%D version=2013.09.06,
+%D title=\METAPOST\ Graphics,
+%D subtitle=Fonts,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{MetaPost Graphics / Fonts}
+
+\registerctxluafile{meta-fnt}{1.001}
+
+\unprotect
+
+\unexpanded\def\definemetafont
+ {\dotripleempty\meta_font_define}
+
+\def\meta_font_define[#1][#2][#3]%
+ {\ctxcommand{definemetafont {
+ fontname = "#1",
+ filename = "#2"
+ % no #3 settings yet (compose, instances)
+ }}}
+
+% \startluacode
+% metapost.fonts.define { fontname = "bidi-symbols", filename = "bidi-symbols.mp" }
+% \stopluacode
+
+% \definemetafont[bidi-symbols][bidi-symbols.mp]
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv b/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv
index 3a9ad5927f8..bcfc5513fbe 100644
--- a/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-imp-txt.mkiv
@@ -170,12 +170,12 @@
% we default to nothing
\stopuseMPgraphic
-\unexpanded\def\followtokens#1%
+\unexpanded\def\dofollowtokens#1#2%
{\vbox\bgroup
\forgetall
\dontcomplain
\startMPenvironment
- \doifundefined{RotFont}{\definefont[RotFont][RegularBold]}
+ \doifundefined{RotFont}{\definefont[RotFont][RegularBold]}%
\stopMPenvironment
\MPtoks\emptytoks
\resetMPdrawing
@@ -183,13 +183,13 @@
\includeMPgraphic{followtokens} ;
picture pic[] ; numeric len[], n ; n := 0 ;
\stopMPdrawing
- \handletokens#1\with\processfollowingtoken
+ \handletokens#2\with\processfollowingtoken
\startMPdrawing
if unknown RotPath : path RotPath ; RotPath := origin ; fi ;
if unknown RotColor : color RotColor ; RotColor := black ; fi ;
if unknown TraceRot : boolean TraceRot ; TraceRot := false ; fi ;
if unknown ExtraRot : numeric ExtraRot ; ExtraRot := 0 ; fi ;
- numeric al, at, pl, wid, pos ; pair ap, ad ;
+ numeric al, at, pl, pc, wid, pos ; pair ap, ad ;
al := arclength RotPath ;
if al=0 :
al := len[n] + ExtraRot ;
@@ -199,13 +199,19 @@
RotPath := RotPath scaled ((len[n]+ExtraRot)/al) ;
al := arclength RotPath ;
fi ;
- pl := (al-len[n])/(if n>1 : (n-1) else : 1 fi) ;
+ if \number#1 = 1 :
+ pl := (al-len[n])/(if n>1 : (n-1) else : 1 fi) ;
+ pc := 0 ;
+ else : % centered / MP
+ pl := 0 ;
+ pc := arclength RotPath/2 - len[n]/2 ;
+ fi ;
if TraceRot :
draw RotPath withpen pencircle scaled 1pt withcolor blue ;
fi ;
for i=1 upto n :
wid := abs(xpart urcorner pic[i] - xpart llcorner pic[i]) ;
- pos := len[i]-wid/2 + (i-1)*pl ;
+ pos := len[i]-wid/2 + (i-1)*pl + pc ;
at := arctime pos of RotPath ;
ap := point at of RotPath ;
ad := direction at of RotPath ;
@@ -225,6 +231,11 @@
\resetMPdrawing
\egroup}
+\unexpanded\def\followtokens {\dofollowtokens1}
+\unexpanded\def\followtokenscentered{\dofollowtokens0}
+
+% stretched variant:
+%
% \followtokens
% {This is just a dummy text, kerned by T{\kern
% -.1667em\lower .5ex\hbox {E}}{\kern -.125emX} and typeset
@@ -232,6 +243,27 @@
% E}{\setMFPfont T}{\setMFPfont A}{\setMFPfont
% P}{\setMFPfont O}{\setMFPfont S}{\setMFPfont T}.\quad}
+% centered variant:
+%
+% \def\followtokengraphicscale#1{%%
+% \startuseMPgraphic {followtokens}
+% path RotPath; RotPath := reverse halfcircle scaled #1 ;
+% draw RotPath ;
+% setbounds currentpicture to boundingbox fullcircle scaled 12cm ;
+% \stopuseMPgraphic}
+%
+% \startoverlay
+% {\followtokengraphicscale{12cm}%%
+% \followtokenscentered{There was question on the list about this kind of graphics.}}
+% {\followtokengraphicscale{10cm}%%
+% \followtokenscentered{And Marco patched followingtokens to handle a centered text.}}
+% {\followtokengraphicscale{8cm}%%
+% \followtokenscentered{That ended up as variant branch in the main macro.}}
+% {\followtokengraphicscale{6cm}%%
+% \followtokenscentered{So now we have two commands.}}
+% \stopoverlay
+
+
\startuseMPgraphic{fuzzycount}
begingroup
save height, span, drift, d, cp ;
diff --git a/Master/texmf-dist/tex/context/base/meta-ini.mkiv b/Master/texmf-dist/tex/context/base/meta-ini.mkiv
index 6502047fbc0..281143e40b0 100644
--- a/Master/texmf-dist/tex/context/base/meta-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-ini.mkiv
@@ -264,7 +264,10 @@
\ifx\p_setups\empty \else
\setups[\p_setups]%
\fi
- \useMPinstancestyleandcolor\c!textstyle\c!textcolor}
+ \useMPinstancestyleparameter\c!textstyle}
+
+\def\meta_set_current_color
+ {\useMPinstancecolorparameter\c!textcolor}
\def\meta_stop_current_graphic
{\global\t_meta_definitions\emptytoks
@@ -317,7 +320,7 @@
\let\normal_meta_process_graphic_stop \meta_process_graphic_stop
\let\meta_process_graphic_start\relax
\let\meta_process_graphic_stop \relax
- \def\meta_process_graphic_figure_start{\startTEXpage\normal_meta_process_graphic_start}%
+ \def\meta_process_graphic_figure_start{\startTEXpage[\c!offset=\v!overlay,\c!align=]\normal_meta_process_graphic_start}%
\def\meta_process_graphic_figure_stop {\normal_meta_process_graphic_stop\stopTEXpage}
\def\MPaskedfigure{all}%
\meta_process_graphic{input "#1" ;}%
@@ -429,16 +432,19 @@
\hskip\cldcontext{fonts.hashes.parameters[font.current()].designsize}sp\relax
\endgroup}
+\definefontsynonym[MetafunDefault][Regular*default]
+
\startMPinitializations % scale is not yet ok
- defaultfont:="\truefontname{Regular}";
- defaultscale:=\the\bodyfontsize/10pt;
+ defaultfont:="\truefontname{MetafunDefault}";
+ % defaultscale:=\the\bodyfontsize/10pt; % only when hard coded 10pt
+ defaultscale:=1;
\stopMPinitializations
% watch out, this is a type1 font because mp can only handle 8 bit fonts
-\startMPinitializations % scale is not yet ok
- defaultfont:="rm-lmtt10";
-\stopMPinitializations
+% \startMPinitializations % scale is not yet ok
+% defaultfont:="rm-lmtt10";
+% \stopMPinitializations
%D A signal that we're in combined \CONTEXT||\METAFUN mode:
@@ -593,7 +599,7 @@
%D \stoptyping
\def\overlaystamp % watch the \MPcolor, since colors can be redefined
- {\overlaywidth:\overlayheight:\overlaydepth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
+ {\overlaywidth:\overlayheight:\overlaydepth:\overlayoffset:\overlaylinewidth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
%D A better approach is to let additional variables play a role
%D in determining the uniqueness. In the next macro, the
@@ -846,10 +852,12 @@
% This will change ...
-\def\MPdataMPDfile{\jobname-mpgraph.mpd}
+\def\MPdataMPDfile{\jobname-mpgraph.mpd} % this one will become obsolete
\def\MPdataMPOfile{\jobname-mpgraph.mpo}
\def\MPdataMPYfile{\jobname-mpgraph.mpy}
+% makempy.registerfile(filename)
+
\startMPinitializations
boolean collapse_data; collapse_data:=true;
def data_mpd_file = "\MPdataMPDfile" enddef ;
@@ -1094,8 +1102,9 @@
CurrentLayout:="\currentlayout";
OverlayWidth:=\overlaywidth;
OverlayHeight:=\overlayheight;
- OverlayDepth:=\overlayheight;
+ OverlayDepth:=\overlaydepth;
OverlayLineWidth:=\overlaylinewidth;
+ OverlayOffset:=\overlayoffset;
%
\m_meta_colo_initializations
%
@@ -1360,6 +1369,67 @@
defaultcolormodel := \ifcase\MPcolormethod1\or1\or3\else3\fi;
\stopMPinitializations
+%D macros
+%D {mprunvar,mpruntab,mprunset}
+%D
+%D \starttyping
+%D \startMPcode
+%D passvariable("version","1.0") ;
+%D passvariable("number",123) ;
+%D passvariable("string","whatever") ;
+%D passvariable("point",(1.5,2.8)) ;
+%D passvariable("triplet",(1/1,1/2,1/3)) ;
+%D passvariable("quad",(1.1,2.2,3.3,4.4)) ;
+%D passvariable("boolean",false) ;
+%D passvariable("path",fullcircle scaled 1cm) ;
+%D draw fullcircle scaled 20pt ;
+%D \stopMPcode
+%D
+%D \ctxlua{inspect(metapost.variables)}
+%D
+%D \MPrunvar{version} \MPruntab{quad}{3} (\MPrunset{triplet}{,})
+%D
+%D $(x,y) = (\MPruntab{point}{1},\MPruntab{point}{2})$
+%D $(x,y) = (\MPrunset{point}{,})$
+%D \stoptyping
+
+\def\MPrunvar #1{\ctxcommand{mprunvar("#1")}} \let\mprunvar\MPrunvar
+\def\MPruntab#1#2{\ctxcommand{mprunvar("#1",\number#2)}} \let\mpruntab\MPruntab
+\def\MPrunset#1#2{\ctxcommand{mprunvar("#1","#2")}} \let\mprunset\MPrunset
+
+%D We also provide an outputless run:
+
+\unexpanded\def\startMPcalculation
+ {\begingroup
+ \setbox\nextbox\hbox\bgroup
+ \dosinglegroupempty\meta_start_calculation}
+
+\def\meta_start_calculation
+ {\iffirstargument
+ \expandafter\meta_start_calculation_instance
+ \else
+ \expandafter\meta_start_calculation_standard
+ \fi}
+
+\def\meta_start_calculation_instance#1#2\stopMPcalculation
+ {\edef\currentMPinstance{#1}%
+ \let\currentMPgraphicname\empty
+ \edef\currentMPformat{\MPinstanceparameter\s!format}%
+ \meta_enable_include
+ \meta_process_graphic{#2;draw origin}%
+ \egroup
+ \endgroup}
+
+\def\meta_start_calculation_standard#1#2\stopMPcalculation
+ {\let\currentMPinstance\defaultMPinstance
+ \let\currentMPgraphicname\empty
+ \edef\currentMPformat{\MPinstanceparameter\s!format}%
+ \meta_process_graphic{#2;draw origin}%
+ \egroup
+ \endgroup}
+
+\let\stopMPcalculation\relax
+
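+%D A possible use (illustrative only, combining it with the variable passing shown
+%D above) is to run \METAPOST\ code purely for its side effects:
+%D
+%D \starttyping
+%D \startMPcalculation
+%D     passvariable("answer",sqrt 2) ;
+%D \stopMPcalculation
+%D
+%D The answer is \MPrunvar{answer}.
+%D \stoptyping
+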
%D \macros
%D {setupMPgraphics}
%D
@@ -1381,6 +1451,23 @@
\setupMPgraphics
[\c!color=\v!local]
+%D This can save some runtime: rename the mpy file from a first run (when stable) to
+%D another file and reuse it. One can also use the original filename, but a copy is
+%D often better.
+%D
+%D \starttyping
+%D \setupMPgraphics
+%D [mpy=\jobname.mpy]
+%D \stoptyping
+
+\appendtoks
+ \edef\p_mpy{\directMPgraphicsparameter{mpy}}%
+ \ifx\p_mpy\empty \else
+ \let\MPdataMPYfile\p_mpy
+ \ctxlua{metapost.makempy.registerfile("\p_mpy")}%
+ \fi
+\to \everysetupMPgraphics
+
%D Done.
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/meta-pag.mkiv b/Master/texmf-dist/tex/context/base/meta-pag.mkiv
index 7124902e744..a25353b188e 100644
--- a/Master/texmf-dist/tex/context/base/meta-pag.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-pag.mkiv
@@ -41,7 +41,7 @@
% maybe always set as frozen anyway
\startMPinitializations
- def LoadPageState =
+ % def LoadPageState =
OnRightPage:=\MPonrightpage;
OnOddPage:=\MPonoddpage;
RealPageNumber:=\the\realpageno;
@@ -88,6 +88,8 @@
LayoutColumns:=\the\layoutcolumns;
LayoutColumnDistance:=\the\layoutcolumndistance;
LayoutColumnWidth:=\the\layoutcolumnwidth;
+ def LoadPageState =
+ % now always set .. this dummy can move to the mp code
enddef;
\stopMPinitializations
diff --git a/Master/texmf-dist/tex/context/base/meta-pdf.lua b/Master/texmf-dist/tex/context/base/meta-pdf.lua
index ac1ca5b252a..512384450a8 100644
--- a/Master/texmf-dist/tex/context/base/meta-pdf.lua
+++ b/Master/texmf-dist/tex/context/base/meta-pdf.lua
@@ -23,6 +23,8 @@ local report_mptopdf = logs.reporter("graphics","mptopdf")
local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
+local texgetattribute = tex.getattribute
+
local pdfrgbcode = lpdf.rgbcode
local pdfcmykcode = lpdf.cmykcode
local pdfgraycode = lpdf.graycode
@@ -36,8 +38,8 @@ local mptopdf = metapost.mptopdf
mptopdf.nofconverted = 0
-local f_translate = formatters["1 0 0 0 1 %f %f cm"] -- no %s due to 1e-035 issues
-local f_concat = formatters["%f %f %f %f %f %f cm"] -- no %s due to 1e-035 issues
+local f_translate = formatters["1 0 0 0 1 %F %F cm"] -- no %s due to 1e-035 issues
+local f_concat = formatters["%F %F %F %F %F %F cm"] -- no %s due to 1e-035 issues
local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false
@@ -525,8 +527,13 @@ local captures_new = ( space + verbose + procset + preamble )^0
local function parse(m_data)
if find(m_data,"%%%%BeginResource: procset mpost") then
+ -- report_mptopdf("using sparse scanner, case 1")
+ lpegmatch(captures_new,m_data)
+ elseif find(m_data,"%%%%BeginProlog%s*%S+(.-)%%%%EndProlog") then
+ -- report_mptopdf("using sparse scanner, case 2")
lpegmatch(captures_new,m_data)
else
+ -- report_mptopdf("using verbose ps scanner")
lpegmatch(captures_old,m_data)
end
end
@@ -539,7 +546,7 @@ function mptopdf.convertmpstopdf(name)
resetall()
local ok, m_data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load !
if ok then
- mps.colormodel = tex.attribute[a_colorspace]
+ mps.colormodel = texgetattribute(a_colorspace)
statistics.starttiming(mptopdf)
mptopdf.nofconverted = mptopdf.nofconverted + 1
pdfcode(formatters["\\letterpercent\\space mptopdf begin: n=%s, file=%s"](mptopdf.nofconverted,file.basename(name)))
diff --git a/Master/texmf-dist/tex/context/base/meta-pdf.mkiv b/Master/texmf-dist/tex/context/base/meta-pdf.mkiv
index a8fdaff42eb..3469419d468 100644
--- a/Master/texmf-dist/tex/context/base/meta-pdf.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-pdf.mkiv
@@ -37,7 +37,7 @@
\def\PDFMPformoffset{\ifdefined\objectoffset\objectoffset\else\zeropoint\fi} % obsolete, will go
-\def\convertMPtoPDF#1#2#3% scaling no longer supported at this level (so #2 & #3 are ignored)
+\unexpanded\def\convertMPtoPDF#1#2#3% scaling no longer supported at this level (so #2 & #3 are ignored)
{\dostarttagged\t!mpgraphic\empty
\naturalvbox attr \imageattribute 1 \bgroup
\message{[MP to PDF]}%
diff --git a/Master/texmf-dist/tex/context/base/meta-tex.lua b/Master/texmf-dist/tex/context/base/meta-tex.lua
index c29498ad109..7a4123abbb3 100644
--- a/Master/texmf-dist/tex/context/base/meta-tex.lua
+++ b/Master/texmf-dist/tex/context/base/meta-tex.lua
@@ -6,7 +6,11 @@ if not modules then modules = { } end modules ['meta-tex'] = {
license = "see context related readme files"
}
---~ local P, C, lpegmatch = lpeg.P, lpeg.C, lpeg.match
+local format, gsub, find, match = string.format, string.gsub, string.find, string.match
+local formatters = string.formatters
+local P, S, R, C, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.match
+
+metapost = metapost or { }
-- local left = P("[")
-- local right = P("]")
@@ -29,10 +33,137 @@ if not modules then modules = { } end modules ['meta-tex'] = {
-- end
-- end
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
-
local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char
function metapost.escaped(str)
context(lpegmatch(pattern,str))
end
+
+local simplify = true
+
+-- local function strip(n,e)
+-- -- get rid of e(0)
+-- -- get rid of e(+*)
+-- e = gsub(e,"^+","")
+-- -- remove leading zeros
+-- e = gsub(e,"^([+-]*)0+(%d)","%1%2")
+-- if not simplify then
+-- -- take it as it is
+-- elseif n == "1" then
+-- return format("10^{%s}",e)
+-- end
+-- return format("%s\\times10^{%s}",n,e)
+-- end
+--
+-- function metapost.format_n(fmt,...)
+-- fmt = gsub(fmt,"@","%%")
+-- local initial, hasformat, final = match(fmt,"^(.-)(%%.-[%a])(.-)$")
+-- if hasformat then
+-- str = format(fmt,...)
+-- str = gsub(str,"(.-)e(.-)$",strip)
+-- str = format("%s\\mathematics{%s}%s",initial,str,final)
+-- elseif not find(fmt,"%%") then
+-- str = format("%"..fmt,...)
+-- str = gsub(str,"(.-)e(.-)$",strip)
+-- str = format("\\mathematics{%s}",str)
+-- end
+-- context(str)
+-- end
+
+-- todo: proper lpeg
+
+-- local function strip(n,e)
+-- -- get rid of e(0)
+-- -- get rid of e(+*)
+-- e = gsub(e,"^+","")
+-- -- remove leading zeros
+-- e = gsub(e,"^([+-]*)0+(%d)","%1%2")
+-- if not simplify then
+-- -- take it as it is
+-- elseif n == "1" then
+-- return format("\\mathematics{10^{%s}}",e)
+-- end
+-- return format("\\mathematics{%s\\times10^{%s}}",n,e)
+-- end
+--
+-- function metapost.format_n(fmt,...)
+-- fmt = gsub(fmt,"@","%%")
+-- if find(fmt,"%%") then
+-- str = format(fmt,...)
+-- else -- yes or no
+-- str = format("%"..fmt,...)
+-- end
+-- str = gsub(str,"([%-%+]-[%.%d]+)e([%-%+]-[%.%d]+)",strip)
+-- context(str)
+-- end
+--
+-- function metapost.format_v(fmt,str)
+-- metapost.format_n(fmt,metapost.untagvariable(str,false))
+-- end
+
+-- -- --
+
+local number = C((S("+-")^0 * R("09","..")^1))
+local enumber = number * S("eE") * number
+
+local cleaner = Cs((P("@@")/"@" + P("@")/"%%" + P(1))^0)
+
+function format_n(fmt,...)
+ return
+end
+
+context = context or { exponent = function(...) print(...) end }
+
+function metapost.format_string(fmt,...)
+ context(lpegmatch(cleaner,fmt),...)
+end
+
+function metapost.format_number(fmt,num)
+ local number = tonumber(num)
+ if number then
+ local base, exponent = lpegmatch(enumber,formatters[lpegmatch(cleaner,fmt)](number))
+ if base and exponent then
+ context.MPexponent(base,exponent)
+ else
+ context(number)
+ end
+ else
+ context(tostring(num))
+ end
+end
+
+-- This is experimental and will change!
+
+function metapost.svformat(fmt,str)
+ metapost.format_string(fmt,metapost.untagvariable(str,false))
+end
+
+function metapost.nvformat(fmt,str)
+ metapost.format_number(fmt,metapost.untagvariable(str,false))
+end
+
+-- local function test(fmt,n)
+-- logs.report("mp format test","fmt: %s, n: %s, result: %s, \\exponent{%s}{%s}",fmt,n,
+-- formatters[lpegmatch(cleaner,fmt)](n),
+-- lpegmatch(enumber,formatters[lpegmatch(cleaner,fmt)](n))
+-- )
+-- end
+--
+-- test("@j","1e-8")
+-- test("@j",1e-8)
+-- test("@j","1e+8")
+-- test("@j","1e-10")
+-- test("@j",1e-10)
+-- test("@j","1e+10")
+-- test("@j","1e-12")
+-- test("@j","1e+12")
+-- test("@j","1e-0")
+-- test("@j","1e+0")
+-- test("@j","1")
+-- test("@j test","1")
+-- test("@j","-1")
+-- test("@j","1e-102")
+-- test("@1.4j","1e+102")
+-- test("@j","1.2e+102")
+-- test("@j","1.23e+102")
+-- test("@j","1.234e+102")
diff --git a/Master/texmf-dist/tex/context/base/meta-tex.mkiv b/Master/texmf-dist/tex/context/base/meta-tex.mkiv
index 4defd260cea..e7ed5972787 100644
--- a/Master/texmf-dist/tex/context/base/meta-tex.mkiv
+++ b/Master/texmf-dist/tex/context/base/meta-tex.mkiv
@@ -28,7 +28,7 @@
\let\stopTeXtexts\relax
-\def\TeXtext
+\unexpanded\def\TeXtext
{\dosingleempty\meta_textext}
\def\meta_textext[#1]#2#3% contrary to mkii we don't process yet but we do expand
@@ -68,7 +68,7 @@
\unexpanded\def\definetextext[#1]%
{\def\currenttextext{#1}%
- \doifnextoptionalelse\meta_textext_define_one\meta_textext_define_zero}
+ \doifnextoptionalcselse\meta_textext_define_one\meta_textext_define_zero}
\def\meta_textext_define_one {\setvalue{\??graphictexarguments1:\currenttextext}}
\def\meta_textext_define_zero{\setvalue{\??graphictexarguments0:\currenttextext}}
@@ -79,7 +79,7 @@
{textext.drt("\mpsometxt#1{\ctxlua{metapost.escaped(\!!bs#2\!!es)}}")}
\unexpanded\def\mpsometxt % no _ catcode
- {\doifnextoptionalelse\meta_some_txt_indeed_yes\meta_some_txt_indeed_nop}
+ {\doifnextoptionalcselse\meta_some_txt_indeed_yes\meta_some_txt_indeed_nop}
\def\meta_some_txt_indeed_yes[#1]%
{\def\currenttextext{#1}%
@@ -138,4 +138,8 @@
%
% {\showstruts\useMPgraphic{testgraphic}}
+\unexpanded\def\MPexponent #1#2{\mathematics{#1\times10^{#2}}}
+\unexpanded\def\MPformatted #1#2{\ctxlua{metapost.svformat("#1","#2")}}
+\unexpanded\def\MPgraphformat#1#2{\ctxlua{metapost.nvformat("#1","#2")}}
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/mlib-ctx.lua b/Master/texmf-dist/tex/context/base/mlib-ctx.lua
index 04e0efcb433..fe52187719e 100644
--- a/Master/texmf-dist/tex/context/base/mlib-ctx.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-ctx.lua
@@ -43,6 +43,7 @@ local function setmpsformat(specification)
specification.method = method
end
specification.mpx = metapost.format(instance,format,method)
+ return specification
end
local extensiondata = metapost.extensiondata or storage.allocate { }
@@ -145,9 +146,11 @@ statistics.register("metapost processing time", function()
local nofconverted = metapost.makempy.nofconverted
local elapsedtime = statistics.elapsedtime
local elapsed = statistics.elapsed
- local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s",
+ local instances, memory = metapost.getstatistics(true)
+ local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s, instances: %i, memory: %0.3f M",
elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n,
- elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n))
+ elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n),
+ instances, memory/(1024*1024))
if nofconverted > 0 then
return format("%s, external: %s (%s calls)",
str, elapsedtime(metapost.makempy), nofconverted)
diff --git a/Master/texmf-dist/tex/context/base/mlib-ctx.mkiv b/Master/texmf-dist/tex/context/base/mlib-ctx.mkiv
index 75ff4548829..e4c1cb6fe3e 100644
--- a/Master/texmf-dist/tex/context/base/mlib-ctx.mkiv
+++ b/Master/texmf-dist/tex/context/base/mlib-ctx.mkiv
@@ -18,6 +18,7 @@
\registerctxluafile{mlib-run}{1.001}
\registerctxluafile{mlib-ctx}{1.001}
+\registerctxluafile{mlib-lua}{1.001}
\unprotect
diff --git a/Master/texmf-dist/tex/context/base/mlib-lua.lua b/Master/texmf-dist/tex/context/base/mlib-lua.lua
new file mode 100644
index 00000000000..9c7a2e43a42
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/mlib-lua.lua
@@ -0,0 +1,185 @@
+if not modules then modules = { } end modules ['mlib-pdf'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is very preliminary code!
+
+local type, tostring, select, loadstring = type, tostring, select, loadstring
+local formatters = string.formatters
+local find, gsub = string.find, string.gsub
+local concat = table.concat
+local lpegmatch = lpeg.match
+
+local report_luarun = logs.reporter("metapost","lua")
+
+local trace_luarun = false trackers.register("metapost.lua",function(v) trace_luarun = v end)
+local trace_enabled = true
+
+mp = mp or { } -- system namespace
+MP = MP or { } -- user namespace
+
+local buffer, n, max = { }, 0, 10 -- we reuse the buffer up to max entries
+
+function mp._f_()
+ if trace_enabled and trace_luarun then
+ local result = concat(buffer," ",1,n)
+ if n > max then
+ buffer = { }
+ end
+ n = 0
+ report_luarun("data: %s",result)
+ return result
+ else
+ if n == 0 then
+ return ""
+ end
+ local result
+ if n == 1 then
+ result = buffer[1]
+ else
+ result = concat(buffer," ",1,n)
+ end
+ if n > max then
+ buffer = { }
+ end
+ n = 0
+ return result
+ end
+end
+
+local f_pair = formatters["(%s,%s)"]
+local f_triplet = formatters["(%s,%s,%s)"]
+local f_quadruple = formatters["(%s,%s,%s,%s)"]
+
+function mp.print(...)
+ for i=1,select("#",...) do
+ n = n + 1
+ buffer[n] = tostring((select(i,...)))
+ end
+end
+
+function mp.pair(x,y)
+ n = n + 1
+ if type(x) == "table" then
+ buffer[n] = f_pair(x[1],x[2])
+ else
+ buffer[n] = f_pair(x,y)
+ end
+end
+
+function mp.triplet(x,y,z)
+ n = n + 1
+ if type(x) == "table" then
+ buffer[n] = f_triplet(x[1],x[2],x[3])
+ else
+ buffer[n] = f_triplet(x,y,z)
+ end
+end
+
+function mp.quadruple(w,x,y,z)
+ n = n + 1
+ if type(w) == "table" then
+ buffer[n] = f_quadruple(w[1],w[2],w[3],w[4])
+ else
+ buffer[n] = f_quadruple(w,x,y,z)
+ end
+end
+
+local replacer = lpeg.replacer("@","%%")
+
+function mp.format(fmt,...)
+ n = n + 1
+ if not find(fmt,"%%") then
+ fmt = lpegmatch(replacer,fmt)
+ end
+ buffer[n] = formatters[fmt](...)
+end
+
+function mp.quoted(fmt,s,...)
+ n = n + 1
+ if s then
+ if not find(fmt,"%%") then
+ fmt = lpegmatch(replacer,fmt)
+ end
+ buffer[n] = '"' .. formatters[fmt](s,...) .. '"'
+ else
+ buffer[n] = '"' .. fmt .. '"'
+ end
+end
+
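+-- In the format strings above the @ presumably stands in for a percent sign, which
+-- would otherwise be dropped as a comment character on the TeX side where such
+-- MetaPost snippets usually live; so mp.format("@0.3f",x) behaves like a "%0.3f"
+-- template (the replacer only kicks in when no real % is present).
+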
+local f_code = formatters["%s return mp._f_()"]
+
+function metapost.runscript(code)
+ local f = loadstring(f_code(code))
+ if f then
+ return tostring(f())
+ else
+ return ""
+ end
+end
+
+local cache, n = { }, 0 -- todo: when > n then reset cache or make weak
+
+function metapost.runscript(code)
+ if trace_enabled and trace_luarun then
+ report_luarun("code: %s",code)
+ end
+ if n > 100 then
+ cache = nil -- forget about caching
+ local f = loadstring(f_code(code))
+ if f then
+ return tostring(f())
+ else
+ return ""
+ end
+ else
+ local f = cache[code]
+ if f then
+ return tostring(f())
+ else
+ f = loadstring(f_code(code))
+ if f then
+ n = n + 1
+ cache[code] = f
+ return tostring(f())
+ else
+ return ""
+ end
+ end
+ end
+end
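+
+-- A rough usage sketch (the MetaPost side is an assumption, not taken from this
+-- file): something along the lines of
+--
+--   numeric n ; n := runscript("mp.print(1+2)") ;
+--
+-- ends up in metapost.runscript above, which compiles and caches the chunk, lets
+-- the mp.* helpers fill the buffer, and hands the result of mp._f_() ("3" here)
+-- back to the MetaPost scanner.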
+
+-- function metapost.initializescriptrunner(mpx)
+-- mp.numeric = function(s) return mpx:get_numeric(s) end
+-- mp.string = function(s) return mpx:get_string (s) end
+-- mp.boolean = function(s) return mpx:get_boolean(s) end
+-- mp.number = mp.numeric
+-- end
+
+local get_numeric = mplib.get_numeric
+local get_string = mplib.get_string
+local get_boolean = mplib.get_boolean
+local get_number = get_numeric
+
+-- function metapost.initializescriptrunner(mpx)
+-- mp.numeric = function(s) return get_numeric(mpx,s) end
+-- mp.string = function(s) return get_string (mpx,s) end
+-- mp.boolean = function(s) return get_boolean(mpx,s) end
+-- mp.number = mp.numeric
+-- end
+
+local currentmpx = nil
+
+mp.numeric = function(s) return get_numeric(currentmpx,s) end
+mp.string = function(s) return get_string (currentmpx,s) end
+mp.boolean = function(s) return get_boolean(currentmpx,s) end
+mp.number = mp.numeric
+
+function metapost.initializescriptrunner(mpx,trialrun)
+ currentmpx = mpx
+ trace_enabled = not trialrun
+end
diff --git a/Master/texmf-dist/tex/context/base/mlib-pdf.lua b/Master/texmf-dist/tex/context/base/mlib-pdf.lua
index 96330995105..d25dde88469 100644
--- a/Master/texmf-dist/tex/context/base/mlib-pdf.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-pdf.lua
@@ -10,13 +10,16 @@ if not modules then modules = { } end modules ['mlib-pdf'] = {
local format, concat, gsub = string.format, table.concat, string.gsub
local abs, sqrt, round = math.abs, math.sqrt, math.round
-local setmetatable = setmetatable
-local Cf, C, Cg, Ct, P, S, lpegmatch = lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.match
+local setmetatable, rawset, tostring, tonumber, type = setmetatable, rawset, tostring, tonumber, type
+local P, S, C, Ct, Cc, Cg, Cf, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Carg
+local lpegmatch = lpeg.match
local formatters = string.formatters
local report_metapost = logs.reporter("metapost")
-local mplib, context = mplib, context
+local trace_variables = false trackers.register("metapost.variables",function(v) trace_variables = v end)
+
+local mplib, context = mplib, context
local allocate = utilities.storage.allocate
@@ -30,15 +33,26 @@ metapost.flushers = metapost.flushers or { }
local pdfflusher = { }
metapost.flushers.pdf = pdfflusher
-metapost.multipass = false
+metapost.multipass = false -- to be stacked
metapost.n = 0
-metapost.optimize = true -- false
+metapost.optimize = true -- false
local experiment = true -- uses context(node) that already does delayed nodes
-
-local savedliterals = nil -- needs checking
+local savedliterals = nil -- needs checking
local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1
+local f_f = formatters["%F"]
+
+local f_m = formatters["%F %F m"]
+local f_c = formatters["%F %F %F %F %F %F c"]
+local f_l = formatters["%F %F l"]
+local f_cm = formatters["%F %F %F %F %F %F cm"]
+local f_M = formatters["%F M"]
+local f_j = formatters["%i j"]
+local f_J = formatters["%i J"]
+local f_d = formatters["[%s] %F d"]
+local f_w = formatters["%F w"]
+
local pdfliteral = function(s)
local literal = copy_node(mpsliteral)
literal.data = s
@@ -117,7 +131,7 @@ end
function pdfflusher.startfigure(n,llx,lly,urx,ury,message)
savedliterals = nil
metapost.n = metapost.n + 1
- context.startMPLIBtoPDF(llx,lly,urx,ury)
+ context.startMPLIBtoPDF(f_f(llx),f_f(lly),f_f(urx),f_f(ury))
if message then pdfflusher.comment(message) end
end
@@ -190,11 +204,11 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord)
+ t[nt] = f_m(pth.x_coord,pth.y_coord)
elseif curved(ith,pth) then
- t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
+ t[nt] = f_c(ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
else
- t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord)
+ t[nt] = f_l(pth.x_coord,pth.y_coord)
end
ith = pth
end
@@ -202,15 +216,15 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
local one = path[1]
if curved(pth,one) then
- t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
+ t[nt] = f_c(pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
else
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ t[nt] = f_l(one.x_coord,one.y_coord)
end
elseif #path == 1 then
-- special case .. draw point
local one = path[1]
nt = nt + 1
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ t[nt] = f_l(one.x_coord,one.y_coord)
end
return t
end
@@ -224,18 +238,18 @@ local function flushconcatpath(path, t, open)
nt = 0
end
nt = nt + 1
- t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty)
+ t[nt] = f_cm(sx,rx,ry,sy,tx,ty)
for i=1,#path do
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = f_m(mpconcat(pth.x_coord,pth.y_coord))
elseif curved(ith,pth) then
local a, b = mpconcat(ith.right_x,ith.right_y)
local c, d = mpconcat(pth.left_x,pth.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = f_c(a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
else
- t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord))
+ t[nt] = f_l(mpconcat(pth.x_coord, pth.y_coord))
end
ith = pth
end
@@ -245,15 +259,15 @@ local function flushconcatpath(path, t, open)
if curved(pth,one) then
local a, b = mpconcat(pth.right_x,pth.right_y)
local c, d = mpconcat(one.left_x,one.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord))
+ t[nt] = f_c(a,b,c,d,mpconcat(one.x_coord, one.y_coord))
else
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ t[nt] = f_l(mpconcat(one.x_coord,one.y_coord))
end
elseif #path == 1 then
-- special case .. draw point
nt = nt + 1
local one = path[1]
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ t[nt] = f_l(mpconcat(one.x_coord,one.y_coord))
end
return t
end
@@ -268,7 +282,97 @@ metapost.flushnormalpath = flushnormalpath
-- performance penalty, but so is passing extra arguments (result, flusher, after)
-- and returning stuff.
-local function ignore() end
+local ignore = function () end
+
+local space = P(" ")
+local equal = P("=")
+local key = C((1-equal)^1) * equal
+local newline = S("\n\r")^1
+local number = (((1-space-newline)^1) / tonumber) * (space^0)
+
+local p_number = number
+local p_string = C((1-newline)^0)
+local p_boolean = P("false") * Cc(false) + P("true") * Cc(true)
+local p_set = Ct(number^1)
+local p_path = Ct(Ct(number * number^-5)^1)
+
+-- local variable =
+-- P("1:") * key * p_number
+-- + P("2:") * key * p_string
+-- + P("3:") * key * p_boolean
+-- + S("4568") * P(":") * key * p_set
+-- + P("7:") * key * p_path
+--
+-- local pattern_key = Cf ( Carg(1) * (Cg(variable * newline^0)^0), rawset)
+
+local variable =
+ P("1:") * key * p_number
+ + P("2:") * key * p_string
+ + P("3:") * key * p_boolean
+ + S("4568") * P(":") * key * p_set
+ + P("7:") * key * p_path
+
+local pattern_tab = Cf ( Carg(1) * (Cg(variable * newline^0)^0), rawset)
+
+local variable =
+ P("1:") * p_number
+ + P("2:") * p_string
+ + P("3:") * p_boolean
+ + S("4568") * P(":") * number^1
+ + P("7:") * (number * number^-5)^1
+
+local pattern_lst = (variable * newline^0)^0
+
+metapost.variables = { } -- to be stacked
+metapost.llx = 0 -- to be stacked
+metapost.lly = 0 -- to be stacked
+metapost.urx = 0 -- to be stacked
+metapost.ury = 0 -- to be stacked
+
+function commands.mprunvar(key,n) -- should be defined in another lib
+ local value = metapost.variables[key]
+ if value ~= nil then
+ local tvalue = type(value)
+ if tvalue == "table" then
+ local ntype = type(n)
+ if ntype == "number" then
+ context(value[n])
+ elseif ntype == "string" then
+ context(concat(value,n))
+ else
+ context(concat(value," "))
+ end
+ elseif tvalue == "number" or tvalue == "boolean" then
+ context(tostring(value))
+ elseif tvalue == "string" then
+ context(value)
+ end
+ end
+end
+
+function metapost.untagvariable(str,variables) -- will be redone
+ if variables == false then
+ return lpegmatch(pattern_lst,str)
+ else
+ return lpegmatch(pattern_tab,str,1,variables or { })
+ end
+end
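-- Editor's usage sketch for metapost.untagvariable, derived from the grammar above: each
-- line of the dump is "<type>:<name>=<value>", where type 1 is a number, 2 a string, 3 a
-- boolean, 4/5/6/8 a space separated number list and 7 a path. The sample string is
-- illustrative only.

local sample = "1:n=2.5\n3:done=true\n4:domain=0 10\n"
local vars   = metapost.untagvariable(sample,{ })

-- vars.n      --> 2.5
-- vars.done   --> true
-- vars.domain --> { 0, 10 }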
+
+-- function metapost.processspecial(str)
+-- lpegmatch(pattern_key,object.prescript,1,variables)
+-- end
+
+function metapost.processspecial(str)
+ local code = loadstring(str)
+ if code then
+ if trace_variables then
+ report_metapost("executing special code: %s",str)
+ end
+ code()
+ else
+ report_metapost("invalid special code: %s",str)
+ end
+end
function metapost.flush(result,flusher,askedfig)
if result then
@@ -283,15 +387,30 @@ function metapost.flush(result,flusher,askedfig)
local stopfigure = flusher.stopfigure
local flushfigure = flusher.flushfigure
local textfigure = flusher.textfigure
- for f=1, #figures do
+ local processspecial = flusher.processspecial or metapost.processspecial
+ for f=1,#figures do
local figure = figures[f]
local objects = getobjects(result,figure,f)
- local fignum = figure:charcode() or 0
+ local fignum = figure:charcode() or 0
if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
local t = { }
local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
local bbox = figure:boundingbox()
local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4]
+ local variables = { }
+ metapost.variables = variables
+ metapost.properties = {
+ llx = llx,
+ lly = lly,
+ urx = urx,
+ ury = ury,
+ slot = figure:charcode(),
+ width = figure:width(),
+ height = figure:height(),
+ depth = figure:depth(),
+ italic = figure:italcorr(),
+ }
+ -- replaced by the above
metapost.llx = llx
metapost.lly = lly
metapost.urx = urx
@@ -308,8 +427,12 @@ function metapost.flush(result,flusher,askedfig)
for o=1,#objects do
local object = objects[o]
local objecttype = object.type
- if objecttype == "start_bounds" or objecttype == "stop_bounds" or objecttype == "special" then
+ if objecttype == "start_bounds" or objecttype == "stop_bounds" then
-- skip
+ elseif objecttype == "special" then
+ if processspecial then
+ processspecial(object.prescript)
+ end
elseif objecttype == "start_clip" then
t[#t+1] = "q"
flushnormalpath(object.path,t,false)
@@ -320,7 +443,7 @@ function metapost.flush(result,flusher,askedfig)
elseif objecttype == "text" then
t[#t+1] = "q"
local ot = object.transform -- 3,4,5,6,1,2
- t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot))
+ t[#t+1] = f_cm(ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%F %F m %F %F %F %F 0 0 cm"](unpack(ot))
flushfigure(t) -- flush accumulated literals
t = { }
textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
@@ -345,21 +468,21 @@ function metapost.flush(result,flusher,askedfig)
local ml = object.miterlimit
if ml and ml ~= miterlimit then
miterlimit = ml
- t[#t+1] = formatters["%f M"](ml)
+ t[#t+1] = f_M(ml)
end
local lj = object.linejoin
if lj and lj ~= linejoin then
linejoin = lj
- t[#t+1] = formatters["%i j"](lj)
+ t[#t+1] = f_j(lj)
end
local lc = object.linecap
if lc and lc ~= linecap then
linecap = lc
- t[#t+1] = formatters["%i J"](lc)
+ t[#t+1] = f_J(lc)
end
local dl = object.dash
if dl then
- local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset)
+ local d = f_d(concat(dl.dashes or {}," "),dl.offset)
if d ~= dashed then
dashed = d
t[#t+1] = dashed
@@ -375,7 +498,7 @@ function metapost.flush(result,flusher,askedfig)
if pen then
if pen.type == 'elliptical' then
transformed, penwidth = pen_characteristics(original) -- boolean, value
- t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed
+ t[#t+1] = f_w(penwidth) -- todo: only if changed
if objecttype == 'fill' then
objecttype = 'both'
end
@@ -395,7 +518,7 @@ function metapost.flush(result,flusher,askedfig)
if objecttype == "fill" then
t[#t+1] = "h f"
elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
+ t[#t+1] = open and "S" or "h S"
elseif objecttype == "both" then
t[#t+1] = "h B"
end
@@ -416,7 +539,7 @@ function metapost.flush(result,flusher,askedfig)
if objecttype == "fill" then
t[#t+1] = "h f"
elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
+ t[#t+1] = open and "S" or "h S"
elseif objecttype == "both" then
t[#t+1] = "h B"
end
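-- Editor's sketch of the new "special" hook in metapost.flush: a flusher can provide its own
-- processspecial, otherwise metapost.processspecial is used, which loadstrings the prescript.
-- The skeleton below is illustrative; only the processspecial field relates to the change.

local report_specials = logs.reporter("metapost","specials")

local myflusher = {
    startfigure    = function(n,llx,lly,urx,ury,message) end,
    flushfigure    = function(literals) end,
    textfigure     = function(font,size,text,width,height,depth) end,
    stopfigure     = function() end,
    processspecial = function(str)
        -- str is the prescript of a "special" object, typically a chunk of lua code
        report_specials("seen: %s",str)
    end,
}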
diff --git a/Master/texmf-dist/tex/context/base/mlib-pps.lua b/Master/texmf-dist/tex/context/base/mlib-pps.lua
index 93bddc2dd4c..ce95d5ca734 100644
--- a/Master/texmf-dist/tex/context/base/mlib-pps.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-pps.lua
@@ -6,20 +6,23 @@ if not modules then modules = { } end modules ['mlib-pps'] = {
license = "see context related readme files",
}
--- todo: make a hashed textext variant where we only process the text once (normally
--- we cannot assume that no macros are involved which influence a next textext
+-- todo: pass multipass nicer
local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split
local tonumber, type = tonumber, type
local round = math.round
-local insert, concat = table.insert, table.concat
+local insert, remove, concat = table.insert, table.remove, table.concat
local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg
-local lpegmatch = lpeg.match
+local lpegmatch, tsplitat, tsplitter = lpeg.match, lpeg.tsplitat, lpeg.tsplitter
local formatters = string.formatters
local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
-local texbox = tex.box
+local context = context
+local context_setvalue = context.setvalue
+
+local texgetbox = tex.getbox
+local texsetbox = tex.setbox
local copy_list = node.copy_list
local free_list = node.flush_list
local setmetatableindex = table.setmetatableindex
@@ -82,10 +85,13 @@ function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattri
innertransparency = outertransparency -- not yet used
end
-local f_gray = formatters["%.3f g %.3f G"]
-local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
-local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
-local f_cm = formatters["q %f %f %f %f %f %f cm"]
+local f_f = formatters["%F"]
+local f_f3 = formatters["%.3F"]
+
+local f_gray = formatters["%.3F g %.3F G"]
+local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
+local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
+local f_cm = formatters["q %F %F %F %F %F %F cm"]
local f_shade = formatters["MpSh%s"]
local function checked_color_pair(color,...)
@@ -108,9 +114,9 @@ end
--~
-local specificationsplitter = lpeg.tsplitat(" ")
-local colorsplitter = lpeg.tsplitter(":",tonumber) -- no need for :
-local domainsplitter = lpeg.tsplitter(" ",tonumber)
+local specificationsplitter = tsplitat(" ")
+local colorsplitter = tsplitter(":",tonumber) -- no need for :
+local domainsplitter = tsplitter(" ",tonumber)
local centersplitter = domainsplitter
local coordinatesplitter = domainsplitter
@@ -147,7 +153,7 @@ local function spotcolorconverter(parent, n, d, p)
return pdfcolor(colors.model,registercolor(nil,'spot',parent,n,d,p)), outercolor
end
-local commasplitter = lpeg.tsplitat(",")
+local commasplitter = tsplitat(",")
local function checkandconvertspot(n_a,f_a,c_a,v_a,n_b,f_b,c_b,v_b)
-- must be the same but we don't check
@@ -211,52 +217,65 @@ local function checkandconvert(ca,cb)
end
end
-local current_format, current_graphic, current_initializations
-
-metapost.multipass = false
+local stack = { } -- quick hack, we will pass topofstack around
+local top = nil
+local nofruns = 0 -- askedfig: "all", "first", number
-local textexts = { } -- all boxes, optionally with a different color
-local texslots = { } -- references to textexts in order or usage
-local texorder = { } -- references to textexts by mp index
-local textrial = 0
-local texfinal = 0
-local scratchbox = 0
+local function startjob(texmode)
+ top = {
+ textexts = { }, -- all boxes, optionally with a different color
+ texslots = { }, -- references to textexts in order of usage
+ texorder = { }, -- references to textexts by mp index
+ textrial = 0,
+ texfinal = 0,
+ -- used by tx plugin
+ texhash = { },
+ texlast = 0,
+ texmode = texmode, -- some day we can then skip all pre/postscripts
+ }
+ insert(stack,top)
+ if trace_runs then
+ report_metapost("starting run at level %i",#stack)
+ end
+ return top
+end
-local function freeboxes()
- for n, box in next, textexts do
- local tn = textexts[n]
- if tn then
+local function stopjob()
+ if top then
+ for n, tn in next, top.textexts do
free_list(tn)
- -- texbox[scratchbox] = tn
- -- texbox[scratchbox] = nil -- this frees too
if trace_textexts then
report_textexts("freeing box %s",n)
end
end
+ if trace_runs then
+ report_metapost("stopping run at level %i",#stack)
+ end
+ remove(stack)
+ top = stack[#stack]
+ return top
end
- textexts = { }
- texslots = { }
- texorder = { }
- textrial = 0
- texfinal = 0
end
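-- Editor's sketch (illustration only) of the job stack introduced above: every run pushes its
-- own state table, so a nested run gets fresh textexts while stopjob restores the enclosing
-- state. The names below are local to this sketch.

local stack, top = { }, nil

local function push(texmode)
    top = { textexts = { }, texmode = texmode }
    stack[#stack+1] = top
    return top
end

local function pop()
    stack[#stack] = nil
    top = stack[#stack]
    return top
end

local outer = push(true)
outer.textexts[1] = "outer box"
local inner = push(true)               -- a nested graphic gets its own state
inner.textexts[1] = "inner box"
pop()                                  -- back to the outer state
assert(top.textexts[1] == "outer box")
pop()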
-metapost.resettextexts = freeboxes
+function metapost.settextexts () end -- obsolete
+function metapost.resettextexts() end -- obsolete
+
+-- end of new
function metapost.settext(box,slot)
- textexts[slot] = copy_list(texbox[box])
- texbox[box] = nil
+ top.textexts[slot] = copy_list(texgetbox(box))
+ texsetbox(box,nil)
-- this will become
- -- textexts[slot] = texbox[box]
+ -- top.textexts[slot] = texgetbox(box)
-- unsetbox(box)
end
function metapost.gettext(box,slot)
- texbox[box] = copy_list(textexts[slot])
+ texsetbox(box,copy_list(top.textexts[slot]))
if trace_textexts then
report_textexts("putting text %s in box %s",slot,box)
end
- -- textexts[slot] = nil -- no, pictures can be placed several times
+ -- top.textexts[slot] = nil -- no, pictures can be placed several times
end
-- rather generic pdf, so use this elsewhere too it no longer pays
@@ -469,8 +488,8 @@ local factor = 65536*(7227/7200)
function metapost.edefsxsy(wd,ht,dp) -- helper for figure
local hd = ht + dp
- context.setvalue("sx",wd ~= 0 and factor/wd or 0)
- context.setvalue("sy",hd ~= 0 and factor/hd or 0)
+ context_setvalue("sx",wd ~= 0 and factor/wd or 0)
+ context_setvalue("sy",hd ~= 0 and factor/hd or 0)
end
local function sxsy(wd,ht,dp) -- helper for text
@@ -489,7 +508,10 @@ local do_safeguard = ";"
local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
function metapost.textextsdata()
- local t, nt, n = { }, 0, 0
+ local texorder = top.texorder
+ local textexts = top.textexts
+ local collected = { }
+ local nofcollected = 0
for n=1,#texorder do
local box = textexts[texorder[n]]
if box then
@@ -497,25 +519,23 @@ function metapost.textextsdata()
if trace_textexts then
report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp)
end
- nt = nt + 1
- t[nt] = f_text_data(n,wd,n,ht,n,dp)
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp)
else
break
end
end
--- inspect(t)
- return t
+ return collected
end
-metapost.intermediate = metapost.intermediate or {}
-metapost.intermediate.actions = metapost.intermediate.actions or {}
-metapost.intermediate.needed = false
+metapost.intermediate = metapost.intermediate or { }
+metapost.intermediate.actions = metapost.intermediate.actions or { }
metapost.method = 1 -- 1:dumb 2:clever
-- maybe we can latelua the texts some day
-local nofruns = 0 -- askedfig: "all", "first", number
+local processmetapost = metapost.process
local function checkaskedfig(askedfig) -- return askedfig, wrappit
if not askedfig then
@@ -534,18 +554,42 @@ local function checkaskedfig(askedfig) -- return askedfig, wrappit
end
end
-function metapost.graphic_base_pass(specification)
- local mpx = specification.mpx -- mandate
- local data = specification.data or ""
- local definitions = specification.definitions or ""
--- local extensions = metapost.getextensions(specification.instance,specification.useextensions)
- local extensions = specification.extensions or ""
- local inclusions = specification.inclusions or ""
+local function extrapass()
+ if trace_runs then
+ report_metapost("second run of job %s, asked figure %a",top.nofruns,top.askedfig)
+ end
+ processmetapost(top.mpx, {
+ top.wrappit and do_begin_fig or "",
+ no_trial_run,
+ concat(metapost.textextsdata()," ;\n"),
+ top.initializations,
+ do_safeguard,
+ top.data,
+ top.wrappit and do_end_fig or "",
+ }, false, nil, false, true, top.askedfig)
+ -- context.MPLIBresettexts() -- must happen afterwards
+end
+
+function metapost.graphic_base_pass(specification) -- name will change (see mlib-ctx.lua)
+ local top = startjob(true)
+ --
+ local mpx = specification.mpx -- mandate
+ local data = specification.data or ""
+ local definitions = specification.definitions or ""
+ -- local extensions = metapost.getextensions(specification.instance,specification.useextensions)
+ local extensions = specification.extensions or ""
+ local inclusions = specification.inclusions or ""
local initializations = specification.initializations or ""
- local askedfig = specification.figure -- no default else no wrapper
+ local askedfig = specification.figure -- no default else no wrapper
--
- nofruns = nofruns + 1
local askedfig, wrappit = checkaskedfig(askedfig)
+ --
+ nofruns = nofruns + 1
+ --
+ top.askedfig = askedfig
+ top.wrappit = wrappit
+ top.nofruns = nofruns
+ --
local done_1, done_2, done_3, forced_1, forced_2, forced_3
data, done_1, forced_1 = checktexts(data)
-- we had preamble = extensions + inclusions
@@ -559,12 +603,12 @@ function metapost.graphic_base_pass(specification)
else
inclusions, done_3, forced_3 = checktexts(inclusions)
end
- metapost.intermediate.needed = false
- metapost.multipass = false -- no needed here
- current_format = mpx
- current_graphic = data
- current_initializations = initializations
- local method = metapost.method
+ top.intermediate = false
+ top.multipass = false -- not needed here
+ top.mpx = mpx
+ top.data = data
+ top.initializations = initializations
+ local method = metapost.method
if trace_runs then
if method == 1 then
report_metapost("forcing two runs due to library configuration")
@@ -585,19 +629,19 @@ function metapost.graphic_base_pass(specification)
report_metapost("first run of job %s, asked figure %a",nofruns,askedfig)
end
-- first true means: trialrun, second true means: avoid extra run if no multipass
- local flushed = metapost.process(mpx, {
+ local flushed = processmetapost(mpx, {
definitions,
extensions,
inclusions,
wrappit and do_begin_fig or "",
do_first_run,
do_trial_run,
- current_initializations,
+ initializations,
do_safeguard,
- current_graphic,
+ data,
wrappit and do_end_fig or "",
}, true, nil, not (forced_1 or forced_2 or forced_3), false, askedfig)
- if metapost.intermediate.needed then
+ if top.intermediate then
for _, action in next, metapost.intermediate.actions do
action()
end
@@ -605,63 +649,70 @@ function metapost.graphic_base_pass(specification)
if not flushed or not metapost.optimize then
-- tricky, we can only ask once for objects and therefore
-- we really need a second run when not optimized
- context.MPLIBextrapass(askedfig)
+ -- context.MPLIBextrapass(askedfig)
+ context(extrapass)
end
else
if trace_runs then
report_metapost("running job %s, asked figure %a",nofruns,askedfig)
end
- metapost.process(mpx, {
+ processmetapost(mpx, {
preamble,
wrappit and do_begin_fig or "",
do_first_run,
no_trial_run,
- current_initializations,
+ initializations,
do_safeguard,
- current_graphic,
+ data,
wrappit and do_end_fig or "",
}, false, nil, false, false, askedfig)
end
+ context(stopjob)
end
-function metapost.graphic_extra_pass(askedfig)
- if trace_runs then
- report_metapost("second run of job %s, asked figure %a",nofruns,askedfig)
- end
- local askedfig, wrappit = checkaskedfig(askedfig)
- metapost.process(current_format, {
- wrappit and do_begin_fig or "",
- no_trial_run,
- concat(metapost.textextsdata()," ;\n"),
- current_initializations,
- do_safeguard,
- current_graphic,
- wrappit and do_end_fig or "",
- }, false, nil, false, true, askedfig)
- context.MPLIBresettexts() -- must happen afterwards
+function metapost.process(...)
+ startjob(false)
+ processmetapost(...)
+ stopjob()
end
local start = [[\starttext]]
-local preamble = [[\long\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]]
+local preamble = [[\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]]
local stop = [[\stoptext]]
+local mpyfilename = nil
+
+function makempy.registerfile(filename)
+ mpyfilename = filename
+end
+
function makempy.processgraphics(graphics)
- if #graphics > 0 then
- makempy.nofconverted = makempy.nofconverted + 1
- starttiming(makempy)
- local mpofile = tex.jobname .. "-mpgraph"
- local mpyfile = file.replacesuffix(mpofile,"mpy")
- local pdffile = file.replacesuffix(mpofile,"pdf")
- local texfile = file.replacesuffix(mpofile,"tex")
- io.savedata(texfile, { start, preamble, metapost.tex.get(), concat(graphics,"\n"), stop }, "\n")
- local command = format("context --once %s %s", (tex.interactionmode == 0 and "--batchmode") or "", texfile)
+ if #graphics == 0 then
+ return
+ end
+ if mpyfilename and io.exists(mpyfilename) then
+ report_metapost("using file: %s",mpyfilename)
+ return
+ end
+ makempy.nofconverted = makempy.nofconverted + 1
+ starttiming(makempy)
+ local mpofile = tex.jobname .. "-mpgraph"
+ local mpyfile = file.replacesuffix(mpofile,"mpy")
+ local pdffile = file.replacesuffix(mpofile,"pdf")
+ local texfile = file.replacesuffix(mpofile,"tex")
+ io.savedata(texfile, { start, preamble, metapost.tex.get(), concat(graphics,"\n"), stop }, "\n")
+ local command = format("context --once %s %s", (tex.interactionmode == 0 and "--batchmode") or "", texfile)
+ os.execute(command)
+ if io.exists(pdffile) then
+ command = format("pstoedit -ssp -dt -f mpost %s %s", pdffile, mpyfile)
+ logs.newline()
+ report_metapost("running: %s",command)
+ logs.newline()
os.execute(command)
- if io.exists(pdffile) then
- command = format("pstoedit -ssp -dt -f mpost %s %s", pdffile, mpyfile)
- os.execute(command)
+ if io.exists(mpyfile) then
local result, r = { }, 0
- if io.exists(mpyfile) then
- local data = io.loaddata(mpyfile)
+ local data = io.loaddata(mpyfile)
+ if data and #data > 0 then
for figure in gmatch(data,"beginfig(.-)endfig") do
r = r + 1
result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure)
@@ -669,8 +720,8 @@ function makempy.processgraphics(graphics)
io.savedata(mpyfile,concat(result,""))
end
end
- stoptiming(makempy)
end
+ stoptiming(makempy)
end
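-- Editor's usage sketch for the makempy.registerfile hook added above: when a pregenerated
-- .mpy file is registered and exists on disk, processgraphics returns early instead of
-- spawning the context/pstoedit subruns. The filename is illustrative.

makempy.registerfile("myproject-mpgraph.mpy")

-- later, in a run that would otherwise regenerate the graphic texts:
makempy.processgraphics { "\\MPLIBgraphictext{some text}" }
-- reports "using file: myproject-mpgraph.mpy" and skips the external conversion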
-- -- the new plugin handler -- --
@@ -687,9 +738,9 @@ local resetteractions = sequencers.new { arguments = "t" }
local analyzeractions = sequencers.new { arguments = "object,prescript" }
local processoractions = sequencers.new { arguments = "object,prescript,before,after" }
-appendgroup(resetteractions, "system")
-appendgroup(analyzeractions, "system")
-appendgroup(processoractions, "system")
+appendgroup(resetteractions, "system")
+appendgroup(analyzeractions, "system")
+appendgroup(processoractions,"system")
-- later entries come first
@@ -728,48 +779,56 @@ end
-- end
function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what
- for i=1,#what do
- local wi = what[i]
- if type(wi) == "function" then
- -- assume injection
- flushfigure(t) -- to be checked: too many 0 g 0 G
- t = { }
- wi()
- else
- t[#t+1] = wi
+ if top.texmode then
+ for i=1,#what do
+ local wi = what[i]
+ if type(wi) == "function" then
+ -- assume injection
+ flushfigure(t) -- to be checked: too many 0 g 0 G
+ t = { }
+ wi()
+ else
+ t[#t+1] = wi
+ end
end
+ return t
end
- return t
end
function metapost.resetplugins(t) -- intialize plugins, before figure
- -- plugins can have been added
- resetter = resetteractions .runner
- analyzer = analyzeractions .runner
- processor = processoractions .runner
- -- let's apply one runner
- resetter(t)
+ if top.texmode then
+ -- plugins can have been added
+ resetter = resetteractions.runner
+ analyzer = analyzeractions.runner
+ processor = processoractions.runner
+ -- let's apply one runner
+ resetter(t)
+ end
end
function metapost.analyzeplugins(object) -- each object (first pass)
- local prescript = object.prescript -- specifications
- if prescript and #prescript > 0 then
- return analyzer(object,splitprescript(prescript))
+ if top.texmode then
+ local prescript = object.prescript -- specifications
+ if prescript and #prescript > 0 then
+ return analyzer(object,splitprescript(prescript))
+ end
end
end
function metapost.processplugins(object) -- each object (second pass)
- local prescript = object.prescript -- specifications
- if prescript and #prescript > 0 then
- local before = { }
- local after = { }
- processor(object,splitprescript(prescript),before,after)
- return #before > 0 and before, #after > 0 and after
- else
- local c = object.color
- if c and #c > 0 then
- local b, a = colorconverter(c)
- return { b }, { a }
+ if top.texmode then
+ local prescript = object.prescript -- specifications
+ if prescript and #prescript > 0 then
+ local before = { }
+ local after = { }
+ processor(object,splitprescript(prescript),before,after)
+ return #before > 0 and before, #after > 0 and after
+ else
+ local c = object.color
+ if c and #c > 0 then
+ local b, a = colorconverter(c)
+ return { b }, { a }
+ end
end
end
end
@@ -799,76 +858,93 @@ local function cl_reset(t)
t[#t+1] = metapost.colorinitializer() -- only color
end
-local tx_hash = { }
-local tx_last = 0
-
local function tx_reset()
- tx_hash = { }
- tx_last = 0
+ if top then
+ top.texhash = { }
+ top.texlast = 0
+ end
end
local fmt = formatters["%s %s %s % t"]
+----- pat = tsplitat(":")
+local pat = lpeg.tsplitter(":",tonumber) -- so that %F can do its work
+
+local ctx_MPLIBsetNtext = context.MPLIBsetNtext
+local ctx_MPLIBsetCtext = context.MPLIBsetCtext
local function tx_analyze(object,prescript) -- todo: hash content and reuse them
local tx_stage = prescript.tx_stage
if tx_stage == "trial" then
- textrial = textrial + 1
+ local tx_trial = top.textrial + 1
+ top.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
local s = object.postscript or ""
local c = object.color -- only simple ones, no transparency
+ if #c == 0 then
+ local txc = prescript.tx_color
+ if txc then
+ c = lpegmatch(pat,txc)
+ end
+ end
local a = prescript.tr_alternative
local t = prescript.tr_transparency
- local h = fmt(tx_number,a or "?",t or "?",c)
- local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels)
+ local h = fmt(tx_number,a or "-",t or "-",c or "-")
+ local n = top.texhash[h] -- todo: hashed variant with s (nicer for similar labels)
if not n then
- tx_last = tx_last + 1
+ local tx_last = top.texlast + 1
+ top.texlast = tx_last
if not c then
- -- no color
+ ctx_MPLIBsetNtext(tx_last,s)
elseif #c == 1 then
if a and t then
- s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,formatters["s=%F,a=%F,t=%F"](c[1],a,t),s)
else
- s = formatters["\\directcolored[s=%f]%s"](c[1],s)
+ ctx_MPLIBsetCtext(tx_last,formatters["s=%F"](c[1]),s)
end
elseif #c == 3 then
if a and t then
- s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,formatters["r=%F,g=%F,b=%F,a=%F,t=%F"](c[1],c[2],c[3],a,t),s)
else
- s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s)
+ ctx_MPLIBsetCtext(tx_last,formatters["r=%F,g=%F,b=%F"](c[1],c[2],c[3]),s)
end
elseif #c == 4 then
if a and t then
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,formatters["c=%F,m=%F,y=%F,k=%F,a=%F,t=%F"](c[1],c[2],c[3],c[4],a,t),s)
else
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s)
+ ctx_MPLIBsetCtext(tx_last,formatters["c=%F,m=%F,y=%F,k=%F"](c[1],c[2],c[3],c[4]),s)
end
+ else
+ ctx_MPLIBsetNtext(tx_last,s)
end
- context.MPLIBsettext(tx_last,s)
- metapost.multipass = true
- tx_hash[h] = tx_last
- texslots[textrial] = tx_last
- texorder[tx_number] = tx_last
+ top.multipass = true
+ metapost.multipass = true -- ugly
+ top.texhash[h] = tx_last
+ top.texslots[tx_trial] = tx_last
+ top.texorder[tx_number] = tx_last
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,tx_last,h)
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,tx_last,h)
end
else
- texslots[textrial] = n
+ top.texslots[tx_trial] = n
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,n,h)
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,n,h)
end
end
elseif tx_stage == "extra" then
- textrial = textrial + 1
+ local tx_trial = top.textrial + 1
+ top.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
- if not texorder[tx_number] then
+ if not top.texorder[tx_number] then
local s = object.postscript or ""
- tx_last = tx_last + 1
+ local tx_last = top.texlast + 1
+ top.texlast = tx_last
context.MPLIBsettext(tx_last,s)
- metapost.multipass = true
- texslots[textrial] = tx_last
- texorder[tx_number] = tx_last
+ top.multipass = true
+ metapost.multipass = true -- ugly
+ top.texslots[tx_trial] = tx_last
+ top.texorder[tx_number] = tx_last
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,textrial,tx_number,tx_last)
+ report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,tx_trial,tx_number,tx_last)
end
end
end
@@ -880,23 +956,23 @@ local function tx_process(object,prescript,before,after)
tx_number = tonumber(tx_number)
local tx_stage = prescript.tx_stage
if tx_stage == "final" then
- texfinal = texfinal + 1
- local n = texslots[texfinal]
+ top.texfinal = top.texfinal + 1
+ local n = top.texslots[top.texfinal]
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,texfinal,tx_number,n)
+ report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,top.texfinal,tx_number,n)
end
local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function
- local box = textexts[n]
+ local box = top.textexts[n]
if box then
before[#before+1] = function()
-- flush always happens, we can have a special flush function injected before
context.MPLIBgettextscaledcm(n,
- format("%f",sx), -- bah ... %s no longer checks
- format("%f",rx), -- bah ... %s no longer checks
- format("%f",ry), -- bah ... %s no longer checks
- format("%f",sy), -- bah ... %s no longer checks
- format("%f",tx), -- bah ... %s no longer checks
- format("%f",ty), -- bah ... %s no longer checks
+ f_f(sx), -- bah ... %s no longer checks
+ f_f(rx), -- bah ... %s no longer checks
+ f_f(ry), -- bah ... %s no longer checks
+ f_f(sy), -- bah ... %s no longer checks
+ f_f(tx), -- bah ... %s no longer checks
+ f_f(ty), -- bah ... %s no longer checks
sxsy(box.width,box.height,box.depth))
end
else
@@ -913,23 +989,25 @@ local function tx_process(object,prescript,before,after)
end
end
--- graphics
+-- graphics (we use the given index because pictures can be reused)
local graphics = { }
function metapost.intermediate.actions.makempy()
if #graphics > 0 then
makempy.processgraphics(graphics)
- graphics = { } -- ?
+ graphics = { } -- ? could be gt_reset
end
end
local function gt_analyze(object,prescript)
local gt_stage = prescript.gt_stage
- if gt_stage == "trial" then
- graphics[#graphics+1] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "")
- metapost.intermediate.needed = true
- metapost.multipass = true
+ local gt_index = tonumber(prescript.gt_index)
+ if gt_stage == "trial" and not graphics[gt_index] then
+ graphics[gt_index] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "")
+ top.intermediate = true
+ top.multipass = true
+ metapost.multipass = true -- ugly
end
end
@@ -1069,7 +1147,7 @@ end
-- color and transparency
local value = Cs ( (
- (Carg(1) * C((1-P(","))^1)) / function(a,b) return format("%0.3f",a * tonumber(b)) end
+ (Carg(1) * C((1-P(","))^1)) / function(a,b) return f_f3(a * tonumber(b)) end
+ P(","))^1
)
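-- Editor's note: the value pattern above is a substitution capture over a comma separated
-- list of color components; each component is multiplied by the extra argument passed to
-- lpegmatch (Carg(1)) and reformatted via f_f3 (%.3F), for instance:
--
--   lpegmatch(value,"1,0.5,0.25",1,0.5) -- every component scaled by 0.5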
diff --git a/Master/texmf-dist/tex/context/base/mlib-pps.mkiv b/Master/texmf-dist/tex/context/base/mlib-pps.mkiv
index f21d84e0dbd..207d9730c8a 100644
--- a/Master/texmf-dist/tex/context/base/mlib-pps.mkiv
+++ b/Master/texmf-dist/tex/context/base/mlib-pps.mkiv
@@ -33,54 +33,88 @@
\newbox \MPtextbox
\newtoks\everyMPLIBsettext % not used
-\newconditional\MPLIBtextgetdone
-
% \def\MPLIBsettext#1% #2%
% {\dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
+%
+% \def\MPLIBresettexts
+% {\ctxlua{metapost.resettextexts()}}
+%
+% \newconditional\MPLIBtextgetdone
+%
+% \def\MPLIBsettext#1% #2%
+% {\ifconditional\MPLIBtextgetdone
+% \else
+% \cldcontext{metapost.tex.get()}% MPenvironments are deprecated
+% \settrue\MPLIBtextgetdone % no \global needed
+% \fi
+% \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
+%
+% \def\MPLIBresettexts
+% {\ctxlua{metapost.resettextexts()}%
+% \setfalse\MPLIBtextgetdone}
-\def\MPLIBsettext#1% #2%
- {\ifconditional\MPLIBtextgetdone
- \else
- \cldcontext{metapost.tex.get()}% MPenvironments are depricated
- \settrue\MPLIBtextgetdone % no \global needed
- \fi
- \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
+\def\doMPLIBflushenvironment
+ {%\writestatus\m!metapost{flushing environment}%
+ \cldcontext{metapost.tex.get()}%
+ \let\MPLIBflushenvironment\relax}% MPenvironments are deprecated
-\def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often
+\let\MPLIBflushenvironment\doMPLIBflushenvironment
+
+\unexpanded\def\MPLIBsetNtext#1% #2% box text
+ {\MPLIBflushenvironment
+ \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox\bgroup
+ \meta_set_current_color
+ \let\MPLIBflushenvironment\doMPLIBflushenvironment
+ \let\next} % gobble open brace
+
+\unexpanded\def\MPLIBsetCtext#1#2% #3% box colorspec text
+ {\MPLIBflushenvironment
+ \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox\bgroup
+ \directcolored[#2]%
+ \meta_set_current_color % so, textcolor wins !
+ \let\MPLIBflushenvironment\doMPLIBflushenvironment
+ \let\next} % gobble open brace
+
+\let\MPLIBsettext\MPLIBsetNtext
+
+\unexpanded\def\MPLIBresettexts
+ {\ctxlua{metapost.resettextexts()}}
+
+\unexpanded\def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often
{\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
\vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=#2,\c!sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\forcecolorhack\hss}}}
-\def\MPLIBfigure#1#2%
+\unexpanded\def\MPLIBfigure#1#2%
{\setbox\scratchbox\hbox{\externalfigure[#1][\c!mask=#2]}%
\ctxlua{metapost.edefsxsy(\number\wd\scratchbox,\number\ht\scratchbox,0)}%
\vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=\sx,\c!sy=\sy]{\box\scratchbox}\hss}}}
-% horrible:
+% horrible (we could inline scale and matrix code):
-\def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty
+\unexpanded\def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty
{\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
\setbox\MPbox\hbox\bgroup
\dotransformnextbox{#2}{#3}{#4}{#5}{#6}{#7}% does push pop ... will be changed to proper lua call (avoid small numbers)
\vbox to \zeropoint\bgroup
\vss
\hbox to \zeropoint \bgroup
-% \scale[\c!sx=#8,\c!sy=#9]{\raise\dp\MPtextbox\box\MPtextbox}%
-% \scale[\c!sx=#8,\c!sy=#9,\c!depth=\v!no]{\box\MPtextbox}%
+ % \scale[\c!sx=#8,\c!sy=#9]{\raise\dp\MPtextbox\box\MPtextbox}%
+ % \scale[\c!sx=#8,\c!sy=#9,\c!depth=\v!no]{\box\MPtextbox}%
\fastsxsy{#8}{#9}{\raise\dp\MPtextbox\box\MPtextbox}%
- % This gives: LuaTeX warning: Misplaced \pdfrestore .. don't ask me why.
- % but I'll retry it some day soon.
- % \dostartscaling{#8}{#9}%
- % \raise\dp\MPtextbox\box\MPtextbox
- % \dostopscaling
- \forcecolorhack % needed ? already in the scale macro
- \hss
+ % This gives: LuaTeX warning: Misplaced \pdfrestore .. don't ask me why.
+ % but I'll retry it some day soon.
+ % \dostartscaling{#8}{#9}%
+ % \raise\dp\MPtextbox\box\MPtextbox
+ % \dostopscaling
+ \forcecolorhack % can go away ... already in the scale macro
+ \hss
\egroup
\egroup
\egroup
\smashbox\MPbox
\box\MPbox}
-\def\MPLIBgraphictext#1% use at mp end
+\unexpanded\def\MPLIBgraphictext#1% use at mp end
{\startTEXpage[\c!scale=10000]#1\stopTEXpage}
%D \startbuffer
@@ -109,18 +143,11 @@
%D
%D \typebuffer \startlinecorrection \getbuffer \stoplinecorrection
-\def\MPLIBpositionwhd#1#2#3#4#5% bp !
+\unexpanded\def\MPLIBpositionwhd#1#2#3#4#5% bp !
{\dosavepositionwhd{#1}\zerocount{#2\onebasepoint}{#3\onebasepoint}{#4\onebasepoint}{#5\onebasepoint}\zeropoint}
-\def\MPLIBextrapass#1%
- {\ctxlua{metapost.graphic_extra_pass("#1")}}
-
-% \def\MPLIBresettexts
-% {\ctxlua{metapost.resettextexts()}}
-
-\def\MPLIBresettexts
- {\ctxlua{metapost.resettextexts()}%$
- \setfalse\MPLIBtextgetdone}
+% \def\MPLIBextrapass#1%
+% {\ctxlua{metapost.graphic_extra_pass("#1")}}
%D Experiment
@@ -142,9 +169,9 @@
\box\scratchbox
\endgroup}
-\def\MPLIBstartgroup#1#2#3#4#5#6% isolated 0/1, knockout 0/1 llx lly urx ury
+\unexpanded\def\MPLIBstartgroup#1#2#3#4#5#6% isolated 0/1, knockout 0/1 llx lly urx ury
{\begingroup
\setbox\scratchbox\hbox\bgroup
- \def\MPLIBstopgroup{\doMPLIBstopgroup{#1}{#2}{#3}{#4}{#5}{#6}}}
+ \unexpanded\def\MPLIBstopgroup{\doMPLIBstopgroup{#1}{#2}{#3}{#4}{#5}{#6}}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/mlib-run.lua b/Master/texmf-dist/tex/context/base/mlib-run.lua
index 1fc36dd80aa..2a34f44d5c8 100644
--- a/Master/texmf-dist/tex/context/base/mlib-run.lua
+++ b/Master/texmf-dist/tex/context/base/mlib-run.lua
@@ -33,7 +33,7 @@ local type, tostring, tonumber = type, tostring, tonumber
local format, gsub, match, find = string.format, string.gsub, string.match, string.find
local concat = table.concat
local emptystring = string.is_empty
-local lpegmatch, P = lpeg.match, lpeg.P
+local P = lpeg.P
local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end)
local trace_tracingall = false trackers.register("metapost.tracingall", function(v) trace_tracingall = v end)
@@ -121,7 +121,7 @@ local function o_finder(name,mode,ftype)
return name
end
-local function finder(name, mode, ftype)
+local function finder(name,mode,ftype)
if mode == "w" then
return o_finder(name,mode,ftype)
else
@@ -284,6 +284,8 @@ if mplibone then
else
+ -- let end = relax ;
+
local preamble = [[
boolean mplib ; mplib := true ;
let dump = endinput ;
@@ -293,17 +295,28 @@ else
local methods = {
double = "double",
scaled = "scaled",
+ binary = "binary",
+ decimal = "decimal",
default = "scaled",
- decimal = false, -- for the moment
}
+ function metapost.runscript(code)
+ return code
+ end
+
+ function metapost.scripterror(str)
+ report_metapost("script error: %s",str)
+ end
+
function metapost.load(name,method)
starttiming(mplib)
method = method and methods[method] or "scaled"
local mpx = mplib.new {
- ini_version = true,
- find_file = finder,
- math_mode = method,
+ ini_version = true,
+ find_file = finder,
+ math_mode = method,
+ run_script = metapost.runscript,
+ script_error = metapost.scripterror,
}
report_metapost("initializing number mode %a",method)
local result
@@ -400,6 +413,10 @@ local mp_inp, mp_log, mp_tag = { }, { }, 0
-- key/values
+if not metapost.initializescriptrunner then
+ function metapost.initializescriptrunner() end
+end
+
function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass, askedfig)
local converted, result = false, { }
if type(mpx) == "string" then
@@ -407,6 +424,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
if mpx and data then
starttiming(metapost)
+ metapost.initializescriptrunner(mpx,trialrun)
if trace_graphics then
if not mp_inp[mpx] then
mp_tag = mp_tag + 1
@@ -589,3 +607,54 @@ function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
end
end
end
+
+-- goodie
+
+function metapost.quickanddirty(mpxformat,data)
+ if not data then
+ mpxformat = "metafun"
+ data = mpxformat
+ end
+ local code, bbox
+ local flusher = {
+ startfigure = function(n,llx,lly,urx,ury)
+ code = { }
+ bbox = { llx, lly, urx, ury }
+ end,
+ flushfigure = function(t)
+ for i=1,#t do
+ code[#code+1] = t[i]
+ end
+ end,
+ stopfigure = function()
+ end
+ }
+ local data = format("; beginfig(1) ;\n %s\n ; endfig ;",data)
+ metapost.process(mpxformat, { data }, false, flusher, false, false, "all")
+ if code then
+ return {
+ bbox = bbox or { 0, 0, 0, 0 },
+ code = code,
+ data = data,
+ }
+ else
+ report_metapost("invalid quick and dirty run")
+ end
+end
+
+function metapost.getstatistics(memonly)
+ if memonly then
+ local n, m = 0, 0
+ for name, mpx in next, mpxformats do
+ n = n + 1
+ m = m + mpx:statistics().memory
+ end
+ return n, m
+ else
+ local t = { }
+ for name, mpx in next, mpxformats do
+ t[name] = mpx:statistics()
+ end
+ return t
+ end
+end
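-- Editor's usage sketch for the quickanddirty goodie above: it runs a snippet through the
-- given (or default) format with a private flusher and returns the collected literals plus
-- the bounding box. The snippet is illustrative.

local result = metapost.quickanddirty("metafun",[[fill fullcircle scaled 10pt withcolor red ;]])
if result then
    print(table.concat(result.code," "))                                -- the accumulated pdf operators
    print(result.bbox[1],result.bbox[2],result.bbox[3],result.bbox[4])  -- llx lly urx ury (bp)
end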
diff --git a/Master/texmf-dist/tex/context/base/mtx-context-xml.tex b/Master/texmf-dist/tex/context/base/mtx-context-xml.tex
new file mode 100644
index 00000000000..eeaca321340
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/mtx-context-xml.tex
@@ -0,0 +1,76 @@
+%D \module
+%D [ file=mtx-context-xml,
+%D version=2013.05.30,
+%D title=\CONTEXT\ Extra Trickery,
+%D subtitle=Analyzing XML files,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% This module replaces mkii analyzers.
+
+% begin help
+%
+% usage: context --extra=xml [options] list-of-files
+%
+% --analyze : show elements and characters
+% --topspace=dimension : distance above first line
+% --backspace=dimension : distance before left margin
+% --bodyfont=list : additional bodyfont settings
+% --paperformat=spec : paper*print or paperxprint
+%
+% end help
+
+\input mtx-context-common.tex
+
+\setupbodyfont
+ [dejavu,11pt,tt,\getdocumentargument{bodyfont}]
+
+\setuptyping
+ [lines=yes]
+
+\setuplayout
+ [header=0cm,
+ footer=1.5cm,
+ topspace=\getdocumentargumentdefault{topspace}{1.5cm},
+ backspace=\getdocumentargumentdefault{backspace}{1.5cm},
+ width=middle,
+ height=middle]
+
+\setuppapersize
+ [\getdocumentargument{paperformat_paper}]
+ [\getdocumentargument{paperformat_print}]
+
+\usemodule[xml-analyzers]
+
+\starttext
+
+\startluacode
+ local pattern = document.arguments.pattern
+ local files = document.files
+
+ if pattern then
+ files = dir.glob(pattern)
+ context.setupfootertexts( { pattern }, { "pagenumber" })
+ else
+ context.setupfootertexts( { table.concat(files," ") }, { "pagenumber" })
+ end
+
+ if #files > 0 then
+ if document.arguments.analyze then
+ moduledata.xml.analyzers.structure (files)
+ context.page()
+ moduledata.xml.analyzers.characters(files)
+ else
+ context("no action given")
+ end
+ else
+ context("no files given")
+ end
+\stopluacode
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/mult-aux.lua b/Master/texmf-dist/tex/context/base/mult-aux.lua
index 3c4cbcc0fce..5a103213cde 100644
--- a/Master/texmf-dist/tex/context/base/mult-aux.lua
+++ b/Master/texmf-dist/tex/context/base/mult-aux.lua
@@ -12,6 +12,8 @@ interfaces.namespaces = interfaces.namespaces or { }
local namespaces = interfaces.namespaces
local variables = interfaces.variables
+local context = context
+
local trace_namespaces = false trackers.register("interfaces.namespaces", function(v) trace_namespaces = v end)
local report_namespaces = logs.reporter("interface","namespaces")
@@ -52,7 +54,7 @@ function namespaces.define(namespace,settings)
if trace_namespaces then
report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent)
end
- if not find(parent,"\\") then
+ if not find(parent,"\\",1,true) then
parent = "\\" .. prefix .. parent
-- todo: check if defined
end
diff --git a/Master/texmf-dist/tex/context/base/mult-aux.mkiv b/Master/texmf-dist/tex/context/base/mult-aux.mkiv
index 6c44a0ec9d0..1811f959238 100644
--- a/Master/texmf-dist/tex/context/base/mult-aux.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-aux.mkiv
@@ -106,10 +106,14 @@
\doubleexpandafter\gobbleoneargument
\else
\mult_interfaces_get_parameters_assign#1==\empty\_e_o_p_
- \doubleexpandafter\mult_interfaces_get_parameters_item
+ % \doubleexpandafter\mult_interfaces_get_parameters_item % saves skipping when at end
\fi\fi#2}
-\def\mult_interfaces_get_parameters_error#1#2#3%
+\def\mult_interfaces_get_parameters_error#1#2% #3%
+ {\mult_interfaces_get_parameters_error_indeed{#1}{#2}%
+ \gobbleoneargument}
+
+\def\mult_interfaces_get_parameters_error_indeed#1#2%
{\showassignerror{#2}{\the\inputlineno\space(#1)}}
\def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
@@ -118,9 +122,54 @@
\else\ifx#3\empty
\doubleexpandafter\mult_interfaces_get_parameters_error
\else
- \doubleexpandafter\dosetvalue
+ \doubleexpandafter\mult_interfaces_def
\fi\fi
- \m_mult_interfaces_namespace{#1}{#2}}
+ \m_mult_interfaces_namespace{#1}{#2}%
+ \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+\startinterface english
+
+ % some 10% faster
+
+ \let\mult_interfaces_get_parameters_error\undefined
+
+ \def\mult_interfaces_get_parameters_error_one#1\csname#2#3\endcsname#4%
+ {\mult_interfaces_get_parameters_error_indeed{#2}{#3}\iftrue}
+
+ \def\mult_interfaces_get_parameters_error_two#1\csname#2#3\endcsname#4%
+ {\mult_interfaces_get_parameters_error_indeed{#2}{#3}}
+
+ \def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
+ {\ifx\empty#1\empty
+ \mult_interfaces_get_parameters_error_one
+ \else\ifx#3\empty
+ \mult_interfaces_get_parameters_error_two
+ \else
+ \expandafter\def\csname\m_mult_interfaces_namespace#1\endcsname{#2}%
+ \fi\fi
+ \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+ % interesting but not faster
+ %
+ % \def\mult_interfaces_get_parameters_error_one#1\m_mult_interfaces_namespace#2\fi\fi%
+ % {\mult_interfaces_get_parameters_error_indeed\m_mult_interfaces_namespace{#2}\m_mult_interfaces_namespace\s!dummy\fi}
+ %
+ % \def\mult_interfaces_get_parameters_error_two#1\m_mult_interfaces_namespace#2\fi\fi%
+ % {\mult_interfaces_get_parameters_error_indeed\m_mult_interfaces_namespace{#2}\m_mult_interfaces_namespace\s!dummy\fi\fi}
+ %
+ % \def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
+ % {\expandafter\def\csname
+ % \ifx\empty#1\empty
+ % \mult_interfaces_get_parameters_error_one
+ % \else\ifx#3\empty
+ % \mult_interfaces_get_parameters_error_two
+ % \else
+ % \m_mult_interfaces_namespace#1%
+ % \fi\fi
+ % \endcsname{#2}
+ % \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+\stopinterface
\newif\ifassignment
@@ -132,6 +181,24 @@
% End of experimental code.
+\unexpanded\def\mult_interfaces_let #1#2{\expandafter\let \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_lete#1#2{\expandafter\let \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname\empty}
+\unexpanded\def\mult_interfaces_def #1#2{\expandafter\def \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_edef#1#2{\expandafter\edef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_gdef#1#2{\expandafter\gdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_xdef#1#2{\expandafter\xdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+
+\startinterface english
+
+ \unexpanded\def\mult_interfaces_let #1#2{\expandafter \let\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_lete#1#2{\expandafter \let\csname#1#2\endcsname\empty}
+ \unexpanded\def\mult_interfaces_def #1#2{\expandafter \def\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_edef#1#2{\expandafter\edef\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_gdef#1#2{\expandafter\gdef\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_xdef#1#2{\expandafter\xdef\csname#1#2\endcsname}
+
+\stopinterface
+
% the commented detokenized variant that backtracks ... needs testing usage first
%
% \let\whatever\relax
@@ -207,14 +274,14 @@
% In \MKIV\ we can probably use the english variant for all other
% languages too.
-% todo: inline the \do*value
+% todo: inline the def/let
\unexpanded\def\mult_interfaces_install_parameter_set_handler#1#2#3#4#5#6%
{\ifx#2\relax\let#2\empty\fi
- \unexpanded\def#3{\dosetvalue {#1#2:}}% ##1 {##2} (braces are mandate)
- \unexpanded\def#4{\dosetevalue{#1#2:}}% ##1 {##2} (braces are mandate)
- \unexpanded\def#5{\doletvalue {#1#2:}}% ##1 ##2
- \unexpanded\def#6{\doletvalue {#1#2:}\empty}}% ##1
+ \unexpanded\def#3{\mult_interfaces_def {#1#2:}}% ##1 {##2} (braces are mandatory)
+ \unexpanded\def#4{\mult_interfaces_edef{#1#2:}}% ##1 {##2} (braces are mandatory)
+ \unexpanded\def#5{\mult_interfaces_let {#1#2:}}% ##1 ##2
+ \unexpanded\def#6{\mult_interfaces_lete{#1#2:}}}% ##1
\startinterface english
@@ -548,10 +615,10 @@
\expandafter\noexpand\csname everysetup#2\endcsname}}
\unexpanded\def\mult_interfaces_install_direct_parameter_set_handler#1#2#3#4#5%
- {\unexpanded\def#2{\dosetvalue #1}%
- \unexpanded\def#3{\dosetevalue#1}%
- \unexpanded\def#4{\doletvalue #1}%
- \unexpanded\def#5{\doletvalue #1\empty}}%
+ {\unexpanded\def#2{\mult_interfaces_def #1}%
+ \unexpanded\def#3{\mult_interfaces_edef#1}%
+ \unexpanded\def#4{\mult_interfaces_let #1}%
+ \unexpanded\def#5{\mult_interfaces_let #1\empty}}%
\startinterface english
@@ -694,9 +761,8 @@
\ctxcommand{registernamespace(\number\c_mult_interfaces_n_of_namespaces,"#1")}%
\fi}
-\def\mult_interfaces_get_parameters_error#1#2#3% redefined
- {\ctxcommand{showassignerror("#1","#2","#3",\the\inputlineno)}%
- \waitonfatalerror}
+\def\mult_interfaces_get_parameters_error_indeed#1#2%
+ {\ctxcommand{showassignerror("#1","#2",\the\inputlineno)}} % no longer \waitonfatalerror
% We install two core namespaces here, as we want nice error messages. Maybe
% we will reserve the first 9.
@@ -856,4 +922,198 @@
%D \edef\m_class_whatever{whatever}
%D \stoptyping
+% experiment: in principle this is faster, but not noticeably so, as we don't do that
+% many assignments, and the mechanisms that do are slow anyway; the advantage mostly
+% shows up in nicer tracing
+
+\def\s!simple{simple}
+\def\s!single{single}
+\def\s!double{double}
+\def\s!triple{triple}
+
+\unexpanded\def\syst_helpers_double_empty#1#2#3%
+ {\syst_helpers_argument_reset
+ \doifnextoptionalelse
+ {\syst_helpers_double_empty_one_yes_mult#2#3}%
+ {\syst_helpers_double_empty_one_nop_mult#1}}
+
+\def\syst_helpers_double_empty_one_yes_mult#1#2[#3]%
+ {\firstargumenttrue
+ \doifnextoptionalelse
+ {\secondargumenttrue#2[{#3}]}%
+ {\syst_helpers_double_empty_two_nop_mult#1{#3}}}
+
+\def\syst_helpers_double_empty_one_nop_mult% #1%
+ {\firstargumentfalse
+ \secondargumentfalse
+ }% #1}
+
+\def\syst_helpers_double_empty_two_nop_mult
+ {\secondargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_double_empty_one_spaced_mult
+ \else
+ \expandafter\syst_helpers_double_empty_one_normal_mult
+ \fi}
+
+\def\syst_helpers_double_empty_one_spaced_mult#1#2{#1[{#2}] }
+\def\syst_helpers_double_empty_one_normal_mult#1#2{#1[{#2}]}
+
+\unexpanded\def\mult_interfaces_install_setup_handler#1#2#3#4#5#6#7#8%
+ {\ifx#3\relax\let#3\empty\fi
+ \unexpanded\def#5{\mult_interfaces_get_parameters{#1#3:}}% no every ! don't change it
+ \newtoks#4%
+ \newtoks#7%
+ \edef\m_mult_interface_setup{\strippedcsname#2_}%
+ \unexpanded\edef#2{\syst_helpers_double_empty
+ \csname\m_mult_interface_setup\s!simple\endcsname
+ \csname\m_mult_interface_setup\s!single\endcsname
+ \csname\m_mult_interface_setup\s!double\endcsname}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!double\endcsname[##1][##2]%
+ {\let#6#3%
+ \def#8####1% we will have a simple one as well
+ {\edef#3{####1}%
+ \mult_interfaces_get_parameters{#1#3:}[##2]%
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#6%
+ \the#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!single\endcsname[##1]%
+ {\let#6#3%
+ \let#3\empty
+ \mult_interfaces_get_parameters{#1:}[##1]%
+ \the#4%
+ \let#3#6%
+ \the#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!simple\endcsname%
+ {\let#6#3%
+ \let#3\empty
+ \the#4%
+ \let#3#6%
+ \the#7}}
+
+\unexpanded\def\installsetuphandler#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_setup_handler
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname setup#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2\endcsname
+ \expandafter\noexpand\csname setupcurrent#2\endcsname
+ \expandafter\noexpand\csname saved_setup_current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2root\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
+
+\unexpanded\def\syst_helpers_triple_empty#1#2#3#4%
+ {\syst_helpers_argument_reset
+ \doifnextoptionalelse
+ {\syst_helpers_triple_empty_one_yes_mult#2#3#4}%
+ {\syst_helpers_triple_empty_one_nop_mult#1}}
+
+\def\syst_helpers_triple_empty_one_yes_mult#1#2#3[#4]%
+ {\firstargumenttrue
+ \doifnextoptionalelse
+ {\syst_helpers_triple_empty_two_yes_mult#2#3{#4}}%
+ {\syst_helpers_triple_empty_two_nop_mult#1{#4}}}
+
+\def\syst_helpers_triple_empty_two_yes_mult#1#2#3[#4]%
+ {\secondargumenttrue
+ \doifnextoptionalelse
+ {\thirdargumenttrue#2[{#3}][{#4}]}%
+ {\syst_helpers_triple_empty_three_nop_mult#1{#3}{#4}}}
+
+\def\syst_helpers_triple_empty_one_nop_mult % #1%
+ {\firstargumentfalse
+ \secondargumentfalse
+ \thirdargumentfalse
+ } % #1
+
+\def\syst_helpers_triple_empty_two_nop_mult
+ {\secondargumentfalse
+ \thirdargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_triple_empty_two_spaced_mult
+ \else
+ \expandafter\syst_helpers_triple_empty_two_normal_mult
+ \fi}
+
+\def\syst_helpers_triple_empty_three_nop_mult
+ {\thirdargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_triple_empty_three_spaced_mult
+ \else
+ \expandafter\syst_helpers_triple_empty_three_normal_mult
+ \fi}
+
+\def\syst_helpers_triple_empty_two_spaced_mult #1#2{#1[{#2}] }
+\def\syst_helpers_triple_empty_two_normal_mult #1#2{#1[{#2}]}
+\def\syst_helpers_triple_empty_three_spaced_mult#1#2#3{#1[{#2}][{#3}] }
+\def\syst_helpers_triple_empty_three_normal_mult#1#2#3{#1[{#2}][{#3}]}
+
+\unexpanded\def\mult_interfaces_install_auto_setup_handler#1#2#3#4#5#6#7#8%
+ {\ifx#3\relax\let#3\empty\fi
+ \unexpanded\def#5{\mult_interfaces_get_parameters{#1#3:}}%
+ \newtoks#4%
+ \edef\m_mult_interface_setup{\strippedcsname#2_}%
+ \unexpanded\edef#2{\syst_helpers_triple_empty
+ \csname\m_mult_interface_setup\s!simple\endcsname
+ \csname\m_mult_interface_setup\s!single\endcsname
+ \csname\m_mult_interface_setup\s!double\endcsname
+ \csname\m_mult_interface_setup\s!triple\endcsname}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!triple\endcsname[##1][##2][##3]%
+ {\let#7#3%
+ \def#8####1%
+ {\edef#3{####1}%
+ \expandafter\def\csname#1#3:\s!parent\endcsname{#1##2}%
+ \mult_interfaces_get_parameters{#1#3:}[##3]% always sets parent
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!double\endcsname[##1][##2]%
+ {\let#7#3%
+ \def#8####1%
+ {\edef#3{####1}%
+ #6% checks parent and sets if needed
+ \mult_interfaces_get_parameters{#1#3:}[##2]%
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!single\endcsname[##1]%
+ {\let#7#3%
+ \let#3\empty
+ \mult_interfaces_get_parameters{#1:}[##1]%
+ \the#4%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!simple\endcsname%
+ {\let#7#3%
+ \let#3\empty
+ \the#4%
+ \let#3#7}}
+
+\unexpanded\def\installautosetuphandler#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_auto_setup_handler
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname setup#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2\endcsname
+ \expandafter\noexpand\csname setupcurrent#2\endcsname
+ \expandafter\noexpand\csname check#2parent\endcsname
+ \expandafter\noexpand\csname saved_setup_current#2\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
+
+% okay, we can also get rid of the #9, but this code looks pretty bad, while the previous one is
+% still okay given that we can also use #6 as setup1 (so in fact we can save some cs again and
+% only use one extra)
+%
+% \global\advance\commalevel \plusone
+% \expandafter\def\csname\??nextcommalevel\the\commalevel\endcsname####1,%
+% {\edef#3{####1}%
+% \mult_interfaces_get_parameters{#1#3:}[##2]%
+% \the#5%
+% \syst_helpers_do_process_comma_item}%
+% \expandafter\syst_helpers_do_do_process_comma_item\gobbleoneargument\relax##1,]\relax
+% % \syst_helpers_do_do_process_comma_item##1,]\relax
+% \global\advance\commalevel \minusone
+
\protect
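%
% An illustrative sketch of how such a handler is typically hooked up (the
% namespace \??wh and the name "whatever" are hypothetical, chosen only for
% this example):
%
%   \installsetuphandler \??wh {whatever}
%
% after which the generated \setupwhatever supports three calling forms:
%
%   \setupwhatever                          % only triggers the \everysetupwhatever token lists
%   \setupwhatever [style=bold]             % settings stored under the "\??wh:" prefix
%   \setupwhatever [one,two] [style=bold]   % settings stored per name, under "\??wh<name>:"
%
% \installautosetuphandler additionally accepts a third bracket group naming a
% parent, so \setupwhatever[one][two][style=bold] lets instance "one" inherit
% its settings from "two" before assigning style=bold.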
diff --git a/Master/texmf-dist/tex/context/base/mult-de.mkii b/Master/texmf-dist/tex/context/base/mult-de.mkii
index 893a9d35832..0c4dae5b6f0 100644
--- a/Master/texmf-dist/tex/context/base/mult-de.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-de.mkii
@@ -281,6 +281,7 @@
\setinterfacevariable{marginedge}{marginalkante}
\setinterfacevariable{margintitle}{marginaltitel}
\setinterfacevariable{marking}{beschriftung}
+\setinterfacevariable{math}{math}
\setinterfacevariable{mathalignment}{mathalignment}
\setinterfacevariable{mathcases}{mathcases}
\setinterfacevariable{mathematics}{mathematik}
@@ -414,12 +415,14 @@
\setinterfacevariable{sectionblockenvironment}{sectionblockenvironment}
\setinterfacevariable{sectionnumber}{abschnittsnummer}
\setinterfacevariable{see}{sieh}
+\setinterfacevariable{selectfont}{selectfont}
\setinterfacevariable{september}{september}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{kleinerabstand}
\setinterfacevariable{setups}{impostazioni}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{kurz}
+\setinterfacevariable{simplefonts}{simplefonts}
\setinterfacevariable{singlesided}{einzelseitig}
\setinterfacevariable{slanted}{geneigt}
\setinterfacevariable{slantedbold}{geneigtfett}
@@ -593,6 +596,12 @@
\setinterfaceconstant{blockway}{blockauf}
\setinterfaceconstant{bodyfont}{fliesstext}
\setinterfaceconstant{boffset}{boffset}
+\setinterfaceconstant{boldfeatures}{boldfeatures}
+\setinterfaceconstant{boldfont}{boldfont}
+\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures}
+\setinterfaceconstant{bolditalicfont}{bolditalicfont}
+\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures}
+\setinterfaceconstant{boldslantedfont}{boldslantedfont}
\setinterfaceconstant{bookmark}{bookmark}
\setinterfaceconstant{bottom}{unten}
\setinterfaceconstant{bottomafter}{bottomafter}
@@ -608,6 +617,7 @@
\setinterfaceconstant{calculate}{berechnen}
\setinterfaceconstant{category}{category}
\setinterfaceconstant{ccommand}{cbefehl}
+\setinterfaceconstant{check}{check}
\setinterfaceconstant{click}{klick}
\setinterfaceconstant{clickin}{klickin}
\setinterfaceconstant{clickout}{klickaus}
@@ -630,6 +640,7 @@
\setinterfaceconstant{component}{component}
\setinterfaceconstant{compoundhyphen}{compoundhyphen}
\setinterfaceconstant{compress}{compress}
+\setinterfaceconstant{concerns}{concerns}
\setinterfaceconstant{connector}{connector}
\setinterfaceconstant{continue}{fortsetzen}
\setinterfaceconstant{contrastcolor}{kontrastfarbe}
@@ -682,6 +693,7 @@
\setinterfaceconstant{factor}{faktor}
\setinterfaceconstant{fallback}{fallback}
\setinterfaceconstant{family}{familie}
+\setinterfaceconstant{features}{features}
\setinterfaceconstant{fieldbackgroundcolor}{feldhintergrundfarbe}
\setinterfaceconstant{fieldframecolor}{feldrahmenfarbe}
\setinterfaceconstant{fieldlayer}{fieldlayer}
@@ -711,6 +723,7 @@
\setinterfaceconstant{from}{von}
\setinterfaceconstant{get}{hole}
\setinterfaceconstant{global}{global}
+\setinterfaceconstant{goodies}{goodies}
\setinterfaceconstant{grid}{gritter}
\setinterfaceconstant{hang}{haengend}
\setinterfaceconstant{hcompact}{hcompact}
@@ -747,6 +760,8 @@
\setinterfaceconstant{interaction}{interaktion}
\setinterfaceconstant{interlinespace}{zeilenabstande}
\setinterfaceconstant{internalgrid}{internalgrid}
+\setinterfaceconstant{italicfeatures}{italicfeatures}
+\setinterfaceconstant{italicfont}{italicfont}
\setinterfaceconstant{itemalign}{itemalign}
\setinterfaceconstant{items}{posten}
\setinterfaceconstant{juniorsep}{juniorsep}
@@ -798,9 +813,12 @@
\setinterfaceconstant{marginedge}{marginalkante}
\setinterfaceconstant{marginedgetext}{marginalkantetext}
\setinterfaceconstant{margintext}{marginaltext}
+\setinterfaceconstant{mark}{mark}
\setinterfaceconstant{marking}{beschriftung}
\setinterfaceconstant{marstyle}{beschrstil}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathclass}{mathclass}
+\setinterfaceconstant{mathlimits}{mathlimits}
\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
@@ -818,8 +836,13 @@
\setinterfaceconstant{mindepth}{mindepth}
\setinterfaceconstant{minheight}{minhoehe}
\setinterfaceconstant{minwidth}{minbreite}
+\setinterfaceconstant{moffset}{moffset}
\setinterfaceconstant{monthconversion}{monthconversion}
\setinterfaceconstant{more}{more}
+\setinterfaceconstant{mpdepth}{mpdepth}
+\setinterfaceconstant{mpheight}{mpheight}
+\setinterfaceconstant{mpoffset}{mpoffset}
+\setinterfaceconstant{mpwidth}{mpwidth}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{name}
\setinterfaceconstant{namesep}{namesep}
@@ -859,6 +882,7 @@
\setinterfaceconstant{openaction}{oeffenaktion}
\setinterfaceconstant{openpage}{openpage}
\setinterfaceconstant{openpageaction}{openpageaction}
+\setinterfaceconstant{opticalsize}{opticalsize}
\setinterfaceconstant{option}{option}
\setinterfaceconstant{order}{order}
\setinterfaceconstant{orientation}{orientation}
@@ -926,6 +950,8 @@
\setinterfaceconstant{region}{region}
\setinterfaceconstant{regionin}{regionin}
\setinterfaceconstant{regionout}{regionaus}
+\setinterfaceconstant{regularfeatures}{regularfeatures}
+\setinterfaceconstant{regularfont}{regularfont}
\setinterfaceconstant{repeat}{wiederholen}
\setinterfaceconstant{reset}{reset}
\setinterfaceconstant{resetnumber}{resetnumber}
@@ -972,6 +998,8 @@
\setinterfaceconstant{sectionstarter}{sectionstarter}
\setinterfaceconstant{sectionstopper}{sectionstopper}
\setinterfaceconstant{separator}{seperator}
+\setinterfaceconstant{separatorcolor}{separatorcolor}
+\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
@@ -982,7 +1010,11 @@
\setinterfaceconstant{sidespacebefore}{nebenvorspatium}
\setinterfaceconstant{sign}{zeichen}
\setinterfaceconstant{size}{groesse}
+\setinterfaceconstant{slantedfeatures}{slantedfeatures}
+\setinterfaceconstant{slantedfont}{slantedfont}
\setinterfaceconstant{small}{klein}
+\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures}
+\setinterfaceconstant{smallcapsfont}{smallcapsfont}
\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
@@ -1013,6 +1045,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/Master/texmf-dist/tex/context/base/mult-def.lua b/Master/texmf-dist/tex/context/base/mult-def.lua
index fdb8803bf33..fc2b932c249 100644
--- a/Master/texmf-dist/tex/context/base/mult-def.lua
+++ b/Master/texmf-dist/tex/context/base/mult-def.lua
@@ -3055,7 +3055,7 @@ return {
["pe"]="درجشمارهصفحه",
["ro"]="punenumarpagina",
},
- ["placereferencelist"]={
+ ["placereferencelist"]={ -- not in mkiv
["cs"]="placereferencelist",
["de"]="placereferencelist",
["en"]="placereferencelist",
@@ -6454,6 +6454,34 @@ return {
},
},
["constants"]={
+ -- select/simplefonts
+ ["regularfont"] ={ ["en"]="regularfont" },
+ ["boldfont"] ={ ["en"]="boldfont" },
+ ["italicfont"] ={ ["en"]="italicfont" },
+ ["slantedfont"] ={ ["en"]="slantedfont" },
+ ["bolditalicfont"] ={ ["en"]="bolditalicfont" },
+ ["boldslantedfont"] ={ ["en"]="boldslantedfont" },
+ ["smallcapsfont"] ={ ["en"]="smallcapsfont" },
+ ["features"] ={ ["en"]="features" },
+ ["regularfeatures"] ={ ["en"]="regularfeatures" },
+ ["boldfeatures"] ={ ["en"]="boldfeatures" },
+ ["italicfeatures"] ={ ["en"]="italicfeatures" },
+ ["slantedfeatures"] ={ ["en"]="slantedfeatures" },
+ ["bolditalicfeatures"] ={ ["en"]="bolditalicfeatures" },
+ ["boldslantedfeatures"]={ ["en"]="boldslantedfeatures" },
+ ["smallcapsfeatures"] ={ ["en"]="smallcapsfeatures" },
+ ["opticalsize"] ={ ["en"]="opticalsize" },
+ ["goodies"] ={ ["en"]="goodies" },
+ ["check"] ={ ["en"]="check" },
+ --
+ ["separatorcolor"]={
+ ["en"]="separatorcolor",
+ ["nl"]="scheiderkleur",
+ },
+ ["separatorstyle"]={
+ ["en"]="separatorstyle",
+ ["nl"]="scheiderletter",
+ },
["less"]={
["en"]="less",
["nl"]="minder",
@@ -6585,6 +6613,9 @@ return {
["firstnamesep"]={
["en"]="firstnamesep",
},
+ ["surnamefirstnamesep"]={
+ ["en"]="surnamefirstnamesep",
+ },
["vonsep"]={
["en"]="vonsep",
},
@@ -6594,6 +6625,9 @@ return {
["surnamesep"]={
["en"]="surnamesep",
},
+ ["surnameinitialsep"]={
+ ["en"]="surnameinitialsep",
+ },
["lastnamesep"]={
["en"]="lastnamesep",
},
@@ -6982,6 +7016,10 @@ return {
["pe"]="قبلازسر",
["ro"]="inaintetitlu",
},
+ ["concerns"]={
+ ["en"]="concerns",
+ ["nl"]="betreft",
+ },
["bet"]={
["cs"]="bet",
["de"]="bet",
@@ -8361,6 +8399,10 @@ return {
["pe"]="آیتمها",
["ro"]="elemente",
},
+ ["mark"]={
+ ["en"]="mark",
+ ["nl"]="kernmerk",
+ },
["ken"]={
["cs"]="ken",
["de"]="ken",
@@ -8924,6 +8966,32 @@ return {
["pe"]="کمترینعرض",
["ro"]="latimeminima",
},
+ ["moffset"]={
+ ["cs"]="moffset",
+ ["de"]="moffset",
+ ["en"]="moffset",
+ ["fr"]="moffset",
+ ["it"]="moffset",
+ ["nl"]="moffset",
+ ["pe"]="moffset",
+ ["ro"]="moffset",
+ },
+ ["mpwidth"]={
+ ["en"]="mpwidth",
+ ["nl"]="mpbreedte",
+ },
+ ["mpheight"]={
+ ["en"]="mpheight",
+ ["nl"]="mphoogte",
+ },
+ ["mpdepth"]={
+ ["en"]="mpdepth",
+ ["nl"]="mpdiepte",
+ },
+ ["mpoffset"]={
+ ["en"]="mpoffset",
+ ["nl"]="mpoffset",
+ },
["monthconversion"]={
["en"]="monthconversion",
["nl"]="maandconversie",
@@ -9640,7 +9708,7 @@ return {
["en"]="reference",
["fr"]="reference",
["it"]="riferimento",
- ["nl"]="verwijzing",
+ ["nl"]="referentie",
["pe"]="مرجع",
["ro"]="referinta",
},
@@ -10363,9 +10431,15 @@ return {
["pe"]="بست",
["ro"]="strut",
},
+ ["mathclass"]={
+ ["en"]="mathclass",
+ },
["mathstyle"]={
["en"]="mathstyle",
},
+ ["mathlimits"]={
+ ["en"]="mathlimits",
+ },
["style"]={
["cs"]="pismeno",
["de"]="stil",
@@ -10843,7 +10917,7 @@ return {
["en"]="unknownreference",
["fr"]="referenceinconnue",
["it"]="riferimentoingoto",
- ["nl"]="onbekendeverwijzing",
+ ["nl"]="onbekendereferentie",
["pe"]="مرجعناشناس",
["ro"]="referintanecunoscuta",
},
@@ -11325,6 +11399,15 @@ return {
},
},
["variables"]={
+ ["math"]={
+ ["en"]="math",
+ },
+ ["selectfont"]={
+ ["en"]="selectfont",
+ },
+ ["simplefonts"]={
+ ["en"]="simplefonts",
+ },
["more"]={
["en"]="more",
["nl"]="meer",
diff --git a/Master/texmf-dist/tex/context/base/mult-def.mkiv b/Master/texmf-dist/tex/context/base/mult-def.mkiv
index cb165b055d8..35b2127106d 100644
--- a/Master/texmf-dist/tex/context/base/mult-def.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-def.mkiv
@@ -34,6 +34,12 @@
% start todo:
+\def\c!fences {fences}
+\def\c!keeptogether {keeptogether}
+\def\c!viewerprefix {viewerprefix}
+
+\def\c!dataset {dataset}
+\def\c!sectionblock {sectionblock}
\def\c!language {language}
\def\c!compressseparator{compressseparator}
\def\c!renderingsetup {renderingsetup}
@@ -48,6 +54,7 @@
\def\c!comma {comma}
\def\c!period {period}
\def\c!monthconversion {monthconversion}
+\def\c!authorconversion {authorconversion}
\def\c!comment {comment}
\def\c!textalign {textalign}
\def\c!up {up}
@@ -57,6 +64,9 @@
\def\c!group {group}
\def\c!groupsuffix {groupsuffix}
+\def\c!referencemethod {referencemethod} % forward both
+
+\def\v!dataset {dataset}
\def\v!compressseparator{compressseparator}
\def\v!notation {notation}
\def\v!endnote {endnote}
@@ -72,6 +82,8 @@
\def\v!words {words}
\def\v!combination {combination}
\def\v!norepeat {norepeat}
+\def\v!mixed {mixed}
+\def\v!centerlast {centerlast}
\def\s!lcgreek {lcgreek}
\def\s!ucgreek {ucgreek}
@@ -88,17 +100,29 @@
\def\s!current {current}
+\def\s!rel {rel}
+\def\s!ord {ord}
+
\def\c!HL {HL}
\def\c!VL {VL}
\def\c!NL {NL}
\ifdefined\v!kerncharacters\else \def\v!kerncharacters{kerncharacters} \fi % no time now for translations should be a e! actually
+\ifdefined\v!letterspacing \else \def\v!letterspacing {letterspacing} \fi % no time now for translations should be a e! actually
\ifdefined\v!stretched \else \def\v!stretched {stretched} \fi
\ifdefined\v!vulgarfraction\else \def\v!vulgarfraction{vulgarfraction} \fi
\ifdefined\v!block \else \def\v!block {block} \fi
\ifdefined\v!rule \else \def\v!rule {rule} \fi
\ifdefined\v!oldstyle \else \def\v!oldstyle {oldstyle} \fi
+\def\c!etallimit {etallimit}
+\def\c!etaldisplay{etaldisplay}
+\def\c!etaltext {etaltext}
+
+\ifdefined\v!simplelist\else \def\v!simplelist{simplelist} \fi
+\ifdefined\v!sorting \else \def\v!sorting {sorting} \fi
+\ifdefined\v!synonym \else \def\v!synonym {synonym} \fi
+
% stop todo
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/mult-en.mkii b/Master/texmf-dist/tex/context/base/mult-en.mkii
index c3ab2fc16d0..00861c3be98 100644
--- a/Master/texmf-dist/tex/context/base/mult-en.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-en.mkii
@@ -281,6 +281,7 @@
\setinterfacevariable{marginedge}{marginedge}
\setinterfacevariable{margintitle}{margintitle}
\setinterfacevariable{marking}{marking}
+\setinterfacevariable{math}{math}
\setinterfacevariable{mathalignment}{mathalignment}
\setinterfacevariable{mathcases}{mathcases}
\setinterfacevariable{mathematics}{mathematics}
@@ -414,12 +415,14 @@
\setinterfacevariable{sectionblockenvironment}{sectionblockenvironment}
\setinterfacevariable{sectionnumber}{sectionnumber}
\setinterfacevariable{see}{see}
+\setinterfacevariable{selectfont}{selectfont}
\setinterfacevariable{september}{september}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{serried}
\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
+\setinterfacevariable{simplefonts}{simplefonts}
\setinterfacevariable{singlesided}{singlesided}
\setinterfacevariable{slanted}{slanted}
\setinterfacevariable{slantedbold}{slantedbold}
@@ -593,6 +596,12 @@
\setinterfaceconstant{blockway}{blockway}
\setinterfaceconstant{bodyfont}{bodyfont}
\setinterfaceconstant{boffset}{boffset}
+\setinterfaceconstant{boldfeatures}{boldfeatures}
+\setinterfaceconstant{boldfont}{boldfont}
+\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures}
+\setinterfaceconstant{bolditalicfont}{bolditalicfont}
+\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures}
+\setinterfaceconstant{boldslantedfont}{boldslantedfont}
\setinterfaceconstant{bookmark}{bookmark}
\setinterfaceconstant{bottom}{bottom}
\setinterfaceconstant{bottomafter}{bottomafter}
@@ -608,6 +617,7 @@
\setinterfaceconstant{calculate}{calculate}
\setinterfaceconstant{category}{category}
\setinterfaceconstant{ccommand}{ccommand}
+\setinterfaceconstant{check}{check}
\setinterfaceconstant{click}{click}
\setinterfaceconstant{clickin}{clickin}
\setinterfaceconstant{clickout}{clickout}
@@ -630,6 +640,7 @@
\setinterfaceconstant{component}{component}
\setinterfaceconstant{compoundhyphen}{compoundhyphen}
\setinterfaceconstant{compress}{compress}
+\setinterfaceconstant{concerns}{concerns}
\setinterfaceconstant{connector}{connector}
\setinterfaceconstant{continue}{continue}
\setinterfaceconstant{contrastcolor}{contrastcolor}
@@ -682,6 +693,7 @@
\setinterfaceconstant{factor}{factor}
\setinterfaceconstant{fallback}{fallback}
\setinterfaceconstant{family}{family}
+\setinterfaceconstant{features}{features}
\setinterfaceconstant{fieldbackgroundcolor}{fieldbackgroundcolor}
\setinterfaceconstant{fieldframecolor}{fieldframecolor}
\setinterfaceconstant{fieldlayer}{fieldlayer}
@@ -711,6 +723,7 @@
\setinterfaceconstant{from}{from}
\setinterfaceconstant{get}{get}
\setinterfaceconstant{global}{global}
+\setinterfaceconstant{goodies}{goodies}
\setinterfaceconstant{grid}{grid}
\setinterfaceconstant{hang}{hang}
\setinterfaceconstant{hcompact}{hcompact}
@@ -747,6 +760,8 @@
\setinterfaceconstant{interaction}{interaction}
\setinterfaceconstant{interlinespace}{interlinespace}
\setinterfaceconstant{internalgrid}{internalgrid}
+\setinterfaceconstant{italicfeatures}{italicfeatures}
+\setinterfaceconstant{italicfont}{italicfont}
\setinterfaceconstant{itemalign}{itemalign}
\setinterfaceconstant{items}{items}
\setinterfaceconstant{juniorsep}{juniorsep}
@@ -798,9 +813,12 @@
\setinterfaceconstant{marginedge}{marginedge}
\setinterfaceconstant{marginedgetext}{marginedgetext}
\setinterfaceconstant{margintext}{margintext}
+\setinterfaceconstant{mark}{mark}
\setinterfaceconstant{marking}{marking}
\setinterfaceconstant{marstyle}{marstyle}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathclass}{mathclass}
+\setinterfaceconstant{mathlimits}{mathlimits}
\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
@@ -818,8 +836,13 @@
\setinterfaceconstant{mindepth}{mindepth}
\setinterfaceconstant{minheight}{minheight}
\setinterfaceconstant{minwidth}{minwidth}
+\setinterfaceconstant{moffset}{moffset}
\setinterfaceconstant{monthconversion}{monthconversion}
\setinterfaceconstant{more}{more}
+\setinterfaceconstant{mpdepth}{mpdepth}
+\setinterfaceconstant{mpheight}{mpheight}
+\setinterfaceconstant{mpoffset}{mpoffset}
+\setinterfaceconstant{mpwidth}{mpwidth}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{name}
\setinterfaceconstant{namesep}{namesep}
@@ -859,6 +882,7 @@
\setinterfaceconstant{openaction}{openaction}
\setinterfaceconstant{openpage}{openpage}
\setinterfaceconstant{openpageaction}{openpageaction}
+\setinterfaceconstant{opticalsize}{opticalsize}
\setinterfaceconstant{option}{option}
\setinterfaceconstant{order}{order}
\setinterfaceconstant{orientation}{orientation}
@@ -926,6 +950,8 @@
\setinterfaceconstant{region}{region}
\setinterfaceconstant{regionin}{regionin}
\setinterfaceconstant{regionout}{regionout}
+\setinterfaceconstant{regularfeatures}{regularfeatures}
+\setinterfaceconstant{regularfont}{regularfont}
\setinterfaceconstant{repeat}{repeat}
\setinterfaceconstant{reset}{reset}
\setinterfaceconstant{resetnumber}{resetnumber}
@@ -972,6 +998,8 @@
\setinterfaceconstant{sectionstarter}{sectionstarter}
\setinterfaceconstant{sectionstopper}{sectionstopper}
\setinterfaceconstant{separator}{separator}
+\setinterfaceconstant{separatorcolor}{separatorcolor}
+\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
@@ -982,7 +1010,11 @@
\setinterfaceconstant{sidespacebefore}{sidespacebefore}
\setinterfaceconstant{sign}{sign}
\setinterfaceconstant{size}{size}
+\setinterfaceconstant{slantedfeatures}{slantedfeatures}
+\setinterfaceconstant{slantedfont}{slantedfont}
\setinterfaceconstant{small}{small}
+\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures}
+\setinterfaceconstant{smallcapsfont}{smallcapsfont}
\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
@@ -1013,6 +1045,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/Master/texmf-dist/tex/context/base/mult-fr.mkii b/Master/texmf-dist/tex/context/base/mult-fr.mkii
index 1ba4f3c8cf1..9afe371c2b8 100644
--- a/Master/texmf-dist/tex/context/base/mult-fr.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-fr.mkii
@@ -281,6 +281,7 @@
\setinterfacevariable{marginedge}{bordmarge}
\setinterfacevariable{margintitle}{titremarge}
\setinterfacevariable{marking}{marquage}
+\setinterfacevariable{math}{math}
\setinterfacevariable{mathalignment}{mathalignment}
\setinterfacevariable{mathcases}{mathcases}
\setinterfacevariable{mathematics}{mathematique}
@@ -414,12 +415,14 @@
\setinterfacevariable{sectionblockenvironment}{environementblocsection}
\setinterfacevariable{sectionnumber}{numerosection}
\setinterfacevariable{see}{voit}
+\setinterfacevariable{selectfont}{selectfont}
\setinterfacevariable{september}{septembre}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{serried}
\setinterfacevariable{setups}{reglages}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
+\setinterfacevariable{simplefonts}{simplefonts}
\setinterfacevariable{singlesided}{recto}
\setinterfacevariable{slanted}{incline}
\setinterfacevariable{slantedbold}{grasincline}
@@ -593,6 +596,12 @@
\setinterfaceconstant{blockway}{blockway}
\setinterfaceconstant{bodyfont}{policecorps}
\setinterfaceconstant{boffset}{boffset}
+\setinterfaceconstant{boldfeatures}{boldfeatures}
+\setinterfaceconstant{boldfont}{boldfont}
+\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures}
+\setinterfaceconstant{bolditalicfont}{bolditalicfont}
+\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures}
+\setinterfaceconstant{boldslantedfont}{boldslantedfont}
\setinterfaceconstant{bookmark}{marquepage}
\setinterfaceconstant{bottom}{inf}
\setinterfaceconstant{bottomafter}{bottomafter}
@@ -608,6 +617,7 @@
\setinterfaceconstant{calculate}{calculer}
\setinterfaceconstant{category}{category}
\setinterfaceconstant{ccommand}{ccommande}
+\setinterfaceconstant{check}{check}
\setinterfaceconstant{click}{clic}
\setinterfaceconstant{clickin}{clicinterieur}
\setinterfaceconstant{clickout}{clicexterieur}
@@ -630,6 +640,7 @@
\setinterfaceconstant{component}{composant}
\setinterfaceconstant{compoundhyphen}{compoundhyphen}
\setinterfaceconstant{compress}{compress}
+\setinterfaceconstant{concerns}{concerns}
\setinterfaceconstant{connector}{connector}
\setinterfaceconstant{continue}{continue}
\setinterfaceconstant{contrastcolor}{coleurcontraste}
@@ -682,6 +693,7 @@
\setinterfaceconstant{factor}{facteur}
\setinterfaceconstant{fallback}{fallback}
\setinterfaceconstant{family}{famille}
+\setinterfaceconstant{features}{features}
\setinterfaceconstant{fieldbackgroundcolor}{couleurarrierreplanchamp}
\setinterfaceconstant{fieldframecolor}{couleurcadrechamp}
\setinterfaceconstant{fieldlayer}{calquechamp}
@@ -711,6 +723,7 @@
\setinterfaceconstant{from}{de}
\setinterfaceconstant{get}{obtient}
\setinterfaceconstant{global}{global}
+\setinterfaceconstant{goodies}{goodies}
\setinterfaceconstant{grid}{grille}
\setinterfaceconstant{hang}{suspend}
\setinterfaceconstant{hcompact}{hcompact}
@@ -747,6 +760,8 @@
\setinterfaceconstant{interaction}{interaction}
\setinterfaceconstant{interlinespace}{espaceinterligne}
\setinterfaceconstant{internalgrid}{internalgrid}
+\setinterfaceconstant{italicfeatures}{italicfeatures}
+\setinterfaceconstant{italicfont}{italicfont}
\setinterfaceconstant{itemalign}{itemalign}
\setinterfaceconstant{items}{elements}
\setinterfaceconstant{juniorsep}{juniorsep}
@@ -798,9 +813,12 @@
\setinterfaceconstant{marginedge}{bordmarge}
\setinterfaceconstant{marginedgetext}{textebordmarge}
\setinterfaceconstant{margintext}{textemarge}
+\setinterfaceconstant{mark}{mark}
\setinterfaceconstant{marking}{marquage}
\setinterfaceconstant{marstyle}{stylemarquage}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathclass}{mathclass}
+\setinterfaceconstant{mathlimits}{mathlimits}
\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
@@ -818,8 +836,13 @@
\setinterfaceconstant{mindepth}{profondeurmin}
\setinterfaceconstant{minheight}{hauteurmin}
\setinterfaceconstant{minwidth}{largeurmin}
+\setinterfaceconstant{moffset}{moffset}
\setinterfaceconstant{monthconversion}{monthconversion}
\setinterfaceconstant{more}{more}
+\setinterfaceconstant{mpdepth}{mpdepth}
+\setinterfaceconstant{mpheight}{mpheight}
+\setinterfaceconstant{mpoffset}{mpoffset}
+\setinterfaceconstant{mpwidth}{mpwidth}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{nom}
\setinterfaceconstant{namesep}{namesep}
@@ -859,6 +882,7 @@
\setinterfaceconstant{openaction}{actionouverture}
\setinterfaceconstant{openpage}{openpage}
\setinterfaceconstant{openpageaction}{actionouverturepage}
+\setinterfaceconstant{opticalsize}{opticalsize}
\setinterfaceconstant{option}{option}
\setinterfaceconstant{order}{order}
\setinterfaceconstant{orientation}{orientation}
@@ -926,6 +950,8 @@
\setinterfaceconstant{region}{region}
\setinterfaceconstant{regionin}{entreregion}
\setinterfaceconstant{regionout}{regionexterieure}
+\setinterfaceconstant{regularfeatures}{regularfeatures}
+\setinterfaceconstant{regularfont}{regularfont}
\setinterfaceconstant{repeat}{repete}
\setinterfaceconstant{reset}{reset}
\setinterfaceconstant{resetnumber}{raznumero}
@@ -972,6 +998,8 @@
\setinterfaceconstant{sectionstarter}{sectionstarter}
\setinterfaceconstant{sectionstopper}{sectionstopper}
\setinterfaceconstant{separator}{separateur}
+\setinterfaceconstant{separatorcolor}{separatorcolor}
+\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{reglages}
\setinterfaceconstant{shrink}{shrink}
@@ -982,7 +1010,11 @@
\setinterfaceconstant{sidespacebefore}{espacelateralavant}
\setinterfaceconstant{sign}{signe}
\setinterfaceconstant{size}{dimension}
+\setinterfaceconstant{slantedfeatures}{slantedfeatures}
+\setinterfaceconstant{slantedfont}{slantedfont}
\setinterfaceconstant{small}{petit}
+\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures}
+\setinterfaceconstant{smallcapsfont}{smallcapsfont}
\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
@@ -1013,6 +1045,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/Master/texmf-dist/tex/context/base/mult-fun.lua b/Master/texmf-dist/tex/context/base/mult-fun.lua
index a661c53bb30..0f5bd8ace7d 100644
--- a/Master/texmf-dist/tex/context/base/mult-fun.lua
+++ b/Master/texmf-dist/tex/context/base/mult-fun.lua
@@ -17,7 +17,7 @@ return {
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
-- "halfcircle", "quartercircle",
@@ -96,6 +96,11 @@ return {
"drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels",
"drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
--
+ "undashed",
+ --
"decorated", "redecorated", "undecorated",
+ --
+ "passvariable", "passarrayvariable", "tostring", "format", "formatted",
+ "startpassingvariable", "stoppassingvariable",
},
}
diff --git a/Master/texmf-dist/tex/context/base/mult-ini.lua b/Master/texmf-dist/tex/context/base/mult-ini.lua
index 3b18738debf..08f1639d076 100644
--- a/Master/texmf-dist/tex/context/base/mult-ini.lua
+++ b/Master/texmf-dist/tex/context/base/mult-ini.lua
@@ -10,6 +10,9 @@ local format, gmatch, match = string.format, string.gmatch, string.match
local lpegmatch = lpeg.match
local serialize = table.serialize
+local context = context
+local commands = commands
+
local allocate = utilities.storage.allocate
local mark = utilities.storage.mark
local prtcatcodes = catcodes.numbers.prtcatcodes
@@ -296,12 +299,12 @@ function commands.getmessage(category,tag,default)
context(interfaces.getmessage(category,tag,default))
end
-function commands.showassignerror(namespace,key,value,line)
- local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)")
+function commands.showassignerror(namespace,key,line)
+ local ns, instance = match(namespace,"^(%d+)[^%a]+(%a*)")
if ns then
namespace = corenamespaces[tonumber(ns)] or ns
end
- if instance then
+ if instance and instance ~= "" then
context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key))
else
context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key))
diff --git a/Master/texmf-dist/tex/context/base/mult-it.mkii b/Master/texmf-dist/tex/context/base/mult-it.mkii
index 0d1ea911d1a..802cb840cd3 100644
--- a/Master/texmf-dist/tex/context/base/mult-it.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-it.mkii
@@ -281,6 +281,7 @@
\setinterfacevariable{marginedge}{bordomargine}
\setinterfacevariable{margintitle}{titoloinmargine}
\setinterfacevariable{marking}{marcatura}
+\setinterfacevariable{math}{math}
\setinterfacevariable{mathalignment}{mathalignment}
\setinterfacevariable{mathcases}{mathcases}
\setinterfacevariable{mathematics}{matematica}
@@ -414,12 +415,14 @@
\setinterfacevariable{sectionblockenvironment}{ambientebloccosezione}
\setinterfacevariable{sectionnumber}{numerosezione}
\setinterfacevariable{see}{vedi}
+\setinterfacevariable{selectfont}{selectfont}
\setinterfacevariable{september}{settembre}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{vicino}
\setinterfacevariable{setups}{nastaveni}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
+\setinterfacevariable{simplefonts}{simplefonts}
\setinterfacevariable{singlesided}{facciasingola}
\setinterfacevariable{slanted}{inclinato}
\setinterfacevariable{slantedbold}{inclinatograssetto}
@@ -593,6 +596,12 @@
\setinterfaceconstant{blockway}{blockway}
\setinterfaceconstant{bodyfont}{fonttesto}
\setinterfaceconstant{boffset}{boffset}
+\setinterfaceconstant{boldfeatures}{boldfeatures}
+\setinterfaceconstant{boldfont}{boldfont}
+\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures}
+\setinterfaceconstant{bolditalicfont}{bolditalicfont}
+\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures}
+\setinterfaceconstant{boldslantedfont}{boldslantedfont}
\setinterfaceconstant{bookmark}{segnalibro}
\setinterfaceconstant{bottom}{fondo}
\setinterfaceconstant{bottomafter}{bottomafter}
@@ -608,6 +617,7 @@
\setinterfaceconstant{calculate}{calcola}
\setinterfaceconstant{category}{category}
\setinterfaceconstant{ccommand}{ccomando}
+\setinterfaceconstant{check}{check}
\setinterfaceconstant{click}{click}
\setinterfaceconstant{clickin}{clickdentro}
\setinterfaceconstant{clickout}{clickfuori}
@@ -630,6 +640,7 @@
\setinterfaceconstant{component}{component}
\setinterfaceconstant{compoundhyphen}{compoundhyphen}
\setinterfaceconstant{compress}{compress}
+\setinterfaceconstant{concerns}{concerns}
\setinterfaceconstant{connector}{connector}
\setinterfaceconstant{continue}{continua}
\setinterfaceconstant{contrastcolor}{colorecontrasto}
@@ -682,6 +693,7 @@
\setinterfaceconstant{factor}{fattore}
\setinterfaceconstant{fallback}{fallback}
\setinterfaceconstant{family}{famiglia}
+\setinterfaceconstant{features}{features}
\setinterfaceconstant{fieldbackgroundcolor}{coloresfondocampo}
\setinterfaceconstant{fieldframecolor}{colorecornicecampo}
\setinterfaceconstant{fieldlayer}{fieldlayer}
@@ -711,6 +723,7 @@
\setinterfaceconstant{from}{da}
\setinterfaceconstant{get}{prendi}
\setinterfaceconstant{global}{globale}
+\setinterfaceconstant{goodies}{goodies}
\setinterfaceconstant{grid}{griglia}
\setinterfaceconstant{hang}{sospendi}
\setinterfaceconstant{hcompact}{hcompact}
@@ -747,6 +760,8 @@
\setinterfaceconstant{interaction}{interazione}
\setinterfaceconstant{interlinespace}{interlinea}
\setinterfaceconstant{internalgrid}{internalgrid}
+\setinterfaceconstant{italicfeatures}{italicfeatures}
+\setinterfaceconstant{italicfont}{italicfont}
\setinterfaceconstant{itemalign}{itemalign}
\setinterfaceconstant{items}{elementi}
\setinterfaceconstant{juniorsep}{juniorsep}
@@ -798,9 +813,12 @@
\setinterfaceconstant{marginedge}{bordomargine}
\setinterfaceconstant{marginedgetext}{testobordomargine}
\setinterfaceconstant{margintext}{testomargine}
+\setinterfaceconstant{mark}{mark}
\setinterfaceconstant{marking}{marcatura}
\setinterfaceconstant{marstyle}{stilemarcatura}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathclass}{mathclass}
+\setinterfaceconstant{mathlimits}{mathlimits}
\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
@@ -818,8 +836,13 @@
\setinterfaceconstant{mindepth}{mindeoth}
\setinterfaceconstant{minheight}{altezzamin}
\setinterfaceconstant{minwidth}{ampiezzamin}
+\setinterfaceconstant{moffset}{moffset}
\setinterfaceconstant{monthconversion}{monthconversion}
\setinterfaceconstant{more}{more}
+\setinterfaceconstant{mpdepth}{mpdepth}
+\setinterfaceconstant{mpheight}{mpheight}
+\setinterfaceconstant{mpoffset}{mpoffset}
+\setinterfaceconstant{mpwidth}{mpwidth}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{nome}
\setinterfaceconstant{namesep}{namesep}
@@ -859,6 +882,7 @@
\setinterfaceconstant{openaction}{azioneapri}
\setinterfaceconstant{openpage}{openpage}
\setinterfaceconstant{openpageaction}{azioneapripagina}
+\setinterfaceconstant{opticalsize}{opticalsize}
\setinterfaceconstant{option}{opzione}
\setinterfaceconstant{order}{order}
\setinterfaceconstant{orientation}{orientation}
@@ -926,6 +950,8 @@
\setinterfaceconstant{region}{region}
\setinterfaceconstant{regionin}{entraregione}
\setinterfaceconstant{regionout}{esciregione}
+\setinterfaceconstant{regularfeatures}{regularfeatures}
+\setinterfaceconstant{regularfont}{regularfont}
\setinterfaceconstant{repeat}{ripeti}
\setinterfaceconstant{reset}{reset}
\setinterfaceconstant{resetnumber}{resetnumber}
@@ -972,6 +998,8 @@
\setinterfaceconstant{sectionstarter}{sectionstarter}
\setinterfaceconstant{sectionstopper}{sectionstopper}
\setinterfaceconstant{separator}{separatore}
+\setinterfaceconstant{separatorcolor}{separatorcolor}
+\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
@@ -982,7 +1010,11 @@
\setinterfaceconstant{sidespacebefore}{spaziolateraleprima}
\setinterfaceconstant{sign}{segno}
\setinterfaceconstant{size}{dimensione}
+\setinterfaceconstant{slantedfeatures}{slantedfeatures}
+\setinterfaceconstant{slantedfont}{slantedfont}
\setinterfaceconstant{small}{piccolo}
+\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures}
+\setinterfaceconstant{smallcapsfont}{smallcapsfont}
\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
@@ -1013,6 +1045,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/Master/texmf-dist/tex/context/base/mult-low.lua b/Master/texmf-dist/tex/context/base/mult-low.lua
index 47e31978bfe..86095edab9c 100644
--- a/Master/texmf-dist/tex/context/base/mult-low.lua
+++ b/Master/texmf-dist/tex/context/base/mult-low.lua
@@ -47,7 +47,7 @@ return {
"inicatcodes",
"ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes",
"prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes",
- "xmlcatcodes",
+ "xmlcatcodes", "ctdcatcodes",
--
"escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode",
"endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode",
@@ -90,13 +90,14 @@ return {
--
"startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset",
"doifmode", "doifmodeelse", "doifnotmode",
+ "startmodeset","stopmodeset",
"startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes",
"startenvironment", "stopenvironment", "environment",
"startcomponent", "stopcomponent", "component",
"startproduct", "stopproduct", "product",
"startproject", "stopproject", "project",
"starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument",
- "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule",
+ "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule","setupmodule","currentmoduleparameter","moduleparameter",
--
"startTEXpage", "stopTEXpage",
-- "startMPpage", "stopMPpage", -- already catched by nested lexer
@@ -116,6 +117,14 @@ return {
"continueifinputfile",
--
"luastringsep", "!!bs", "!!es",
+ --
+ "lefttorightmark", "righttoleftmark",
+ --
+ "breakablethinspace", "nobreakspace", "narrownobreakspace", "zerowidthnobreakspace",
+ "ideographicspace", "ideographichalffillspace",
+ "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace",
+ "figurespace", "punctuationspace", "hairspace",
+ "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj",
},
["helpers"] = {
--
@@ -128,6 +137,7 @@ return {
"starttexdefinition", "stoptexdefinition",
"starttexcode", "stoptexcode",
"startcontextcode", "stopcontextcode",
+ "startcontextdefinitioncode", "stopcontextdefinitioncode",
--
"doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup",
"doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler",
@@ -172,7 +182,7 @@ return {
--
"ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg",
"ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg",
- "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter",
+ "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox",
"ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern",
"ruledhglue", "ruledvglue", "normalhglue", "normalvglue",
"ruledpenalty",
@@ -186,6 +196,8 @@ return {
"scratchtoks", "globalscratchtoks",
"scratchbox", "globalscratchbox",
--
+ "normalbaselineskip", "normallineskip", "normallineskiplimit",
+ --
"availablehsize", "localhsize", "setlocalhsize",
--
"nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs",
@@ -209,7 +221,9 @@ return {
--
"doif", "doifnot", "doifelse",
"doifinset", "doifnotinset", "doifinsetelse",
- "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse",
+ "doifnextcharelse", "doifnextoptionalelse", "doifnextoptionalcselse", "doiffastoptionalcheckelse",
+ "doifnextbgroupelse", "doifnextbgroupcselse",
+ "doifnextparenthesiselse",
"doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined",
"doifelsevalue", "doifvalue", "doifnotvalue",
"doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse",
@@ -269,7 +283,9 @@ return {
--
"dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
--
- "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
+ "newconstant", "setnewconstant", "setconstant", "setconstantvalue",
+ "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue",
+ --
"newmacro", "setnewmacro", "newfraction",
"newsignal",
--
@@ -288,6 +304,8 @@ return {
--
"twodigits","threedigits",
--
+ "leftorright",
+ --
"strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
--
"ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
@@ -339,9 +357,23 @@ return {
"definenamedlua",
"obeylualines", "obeyluatokens",
"startluacode", "stopluacode", "startlua", "stoplua",
+ "startctxfunction","stopctxfunction","ctxfunction",
+ "startctxfunctiondefinition","stopctxfunctiondefinition",
--
"carryoverpar",
--
+ "assumelongusagecs",
+ --
"Umathbotaccent",
+ --
+ "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop",
+ "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop",
+ "autodirhbox", "autodirvbox", "autodirvtop",
+ "lefttoright", "righttoleft","synchronizelayoutdirection","synchronizedisplaydirection","synchronizeinlinedirection",
+ --
+ "lesshyphens", "morehyphens", "nohyphens", "dohyphens",
+ --
+ "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath",
+ --
}
}
diff --git a/Master/texmf-dist/tex/context/base/mult-mes.lua b/Master/texmf-dist/tex/context/base/mult-mes.lua
index aed417c92b0..d9ee151a8e8 100644
--- a/Master/texmf-dist/tex/context/base/mult-mes.lua
+++ b/Master/texmf-dist/tex/context/base/mult-mes.lua
@@ -982,13 +982,13 @@ return {
en = "file %a not found, waiting for bibtex",
},
["publications:3"] = {
- en = "wrote a new auxiliary file \\jobname.aux",
+ en = "wrote a new auxiliary file %a",
},
["publications:4"] = {
en = "loading database from %a",
},
["publications:5"] = {
- en = "warning: unknown cite argument %a on line \\the\\inputlineno",
+ en = "warning: unknown cite argument %a on line %a",
},
["publications:6"] = {
en = "loading formatting style from %a",
diff --git a/Master/texmf-dist/tex/context/base/mult-mps.lua b/Master/texmf-dist/tex/context/base/mult-mps.lua
index 59411cd97c4..104b9d42ef0 100644
--- a/Master/texmf-dist/tex/context/base/mult-mps.lua
+++ b/Master/texmf-dist/tex/context/base/mult-mps.lua
@@ -31,7 +31,7 @@ return {
"def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary",
"tertiary", "primarydef", "secondarydef", "tertiarydef",
"randomseed", "also", "contour", "doublepath",
- "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
+ "withcolor", "withcmykcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
"forsuffixes", "downto", "upto", "step", "until",
"charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable",
"boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize",
@@ -51,15 +51,19 @@ return {
"top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt",
--
"redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart",
+ "prescriptpart", "postscriptpart",
"rgbcolor", "cmykcolor", "greycolor", "graycolor",
"colormodel", "graypart",
"dashpart", "penpart",
-- "colorpart",
- "stroked", "filled", "textual", "clipped", "bounded",
+ "stroked", "filled", "textual", "clipped", "bounded", "pathpart",
"expandafter",
+ "minute", "hour", "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline",
+ "fontpart", "fontsize", "glyph", "restoreclipcolor", "troffmode",
},
commands = {
"beginfig", "endfig",
+ "beginglyph", "endglyph", "charscale",
"rotatedaround", "reflectedabout",
"arrowhead",
"currentpen", "currentpicture", "cuttings",
@@ -103,6 +107,8 @@ return {
"graypart", "graycolor",
--
"mm", "pt", "dd", "bp", "cm", "pc", "cc", "in",
+ --
+ "triplet", "quadruplet",
},
internals = { -- we need to remove duplicates above
--
diff --git a/Master/texmf-dist/tex/context/base/mult-nl.mkii b/Master/texmf-dist/tex/context/base/mult-nl.mkii
index 5f1bada7afd..015f58ff15c 100644
--- a/Master/texmf-dist/tex/context/base/mult-nl.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-nl.mkii
@@ -281,6 +281,7 @@
\setinterfacevariable{marginedge}{kantlijn}
\setinterfacevariable{margintitle}{margetitel}
\setinterfacevariable{marking}{markering}
+\setinterfacevariable{math}{math}
\setinterfacevariable{mathalignment}{wiskundeuitlijnen}
\setinterfacevariable{mathcases}{mathcases}
\setinterfacevariable{mathematics}{wiskunde}
@@ -414,12 +415,14 @@
\setinterfacevariable{sectionblockenvironment}{sectieblokomgeving}
\setinterfacevariable{sectionnumber}{sectienummer}
\setinterfacevariable{see}{zie}
+\setinterfacevariable{selectfont}{selectfont}
\setinterfacevariable{september}{september}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{aanelkaar}
\setinterfacevariable{setups}{instellingen}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{kort}
+\setinterfacevariable{simplefonts}{simplefonts}
\setinterfacevariable{singlesided}{enkelzijdig}
\setinterfacevariable{slanted}{schuin}
\setinterfacevariable{slantedbold}{schuinvet}
@@ -593,6 +596,12 @@
\setinterfaceconstant{blockway}{blokwijze}
\setinterfaceconstant{bodyfont}{korps}
\setinterfaceconstant{boffset}{boffset}
+\setinterfaceconstant{boldfeatures}{boldfeatures}
+\setinterfaceconstant{boldfont}{boldfont}
+\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures}
+\setinterfaceconstant{bolditalicfont}{bolditalicfont}
+\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures}
+\setinterfaceconstant{boldslantedfont}{boldslantedfont}
\setinterfaceconstant{bookmark}{bookmark}
\setinterfaceconstant{bottom}{onder}
\setinterfaceconstant{bottomafter}{bottomafter}
@@ -608,6 +617,7 @@
\setinterfaceconstant{calculate}{bereken}
\setinterfaceconstant{category}{categorie}
\setinterfaceconstant{ccommand}{ccommando}
+\setinterfaceconstant{check}{check}
\setinterfaceconstant{click}{klik}
\setinterfaceconstant{clickin}{klikin}
\setinterfaceconstant{clickout}{klikuit}
@@ -630,6 +640,7 @@
\setinterfaceconstant{component}{component}
\setinterfaceconstant{compoundhyphen}{koppelteken}
\setinterfaceconstant{compress}{comprimeren}
+\setinterfaceconstant{concerns}{betreft}
\setinterfaceconstant{connector}{connector}
\setinterfaceconstant{continue}{doorgaan}
\setinterfaceconstant{contrastcolor}{contrastkleur}
@@ -682,6 +693,7 @@
\setinterfaceconstant{factor}{factor}
\setinterfaceconstant{fallback}{terugval}
\setinterfaceconstant{family}{soort}
+\setinterfaceconstant{features}{features}
\setinterfaceconstant{fieldbackgroundcolor}{veldachtergrondkleur}
\setinterfaceconstant{fieldframecolor}{veldkaderkleur}
\setinterfaceconstant{fieldlayer}{veldlaag}
@@ -711,6 +723,7 @@
\setinterfaceconstant{from}{van}
\setinterfaceconstant{get}{haal}
\setinterfaceconstant{global}{globaal}
+\setinterfaceconstant{goodies}{goodies}
\setinterfaceconstant{grid}{grid}
\setinterfaceconstant{hang}{hang}
\setinterfaceconstant{hcompact}{hcomprimeer}
@@ -747,6 +760,8 @@
\setinterfaceconstant{interaction}{interactie}
\setinterfaceconstant{interlinespace}{interlinie}
\setinterfaceconstant{internalgrid}{interngrid}
+\setinterfaceconstant{italicfeatures}{italicfeatures}
+\setinterfaceconstant{italicfont}{italicfont}
\setinterfaceconstant{itemalign}{itemuitlijnen}
\setinterfaceconstant{items}{items}
\setinterfaceconstant{juniorsep}{juniorsep}
@@ -798,9 +813,12 @@
\setinterfaceconstant{marginedge}{kantlijn}
\setinterfaceconstant{marginedgetext}{kantlijntekst}
\setinterfaceconstant{margintext}{margetekst}
+\setinterfaceconstant{mark}{kernmerk}
\setinterfaceconstant{marking}{markering}
\setinterfaceconstant{marstyle}{marletter}
\setinterfaceconstant{mask}{masker}
+\setinterfaceconstant{mathclass}{mathclass}
+\setinterfaceconstant{mathlimits}{mathlimits}
\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
@@ -818,8 +836,13 @@
\setinterfaceconstant{mindepth}{mindiepte}
\setinterfaceconstant{minheight}{minhoogte}
\setinterfaceconstant{minwidth}{minbreedte}
+\setinterfaceconstant{moffset}{moffset}
\setinterfaceconstant{monthconversion}{maandconversie}
\setinterfaceconstant{more}{meer}
+\setinterfaceconstant{mpdepth}{mpdiepte}
+\setinterfaceconstant{mpheight}{mphoogte}
+\setinterfaceconstant{mpoffset}{mpoffset}
+\setinterfaceconstant{mpwidth}{mpbreedte}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{naam}
\setinterfaceconstant{namesep}{namesep}
@@ -859,6 +882,7 @@
\setinterfaceconstant{openaction}{openactie}
\setinterfaceconstant{openpage}{openpagina}
\setinterfaceconstant{openpageaction}{openpaginaactie}
+\setinterfaceconstant{opticalsize}{opticalsize}
\setinterfaceconstant{option}{optie}
\setinterfaceconstant{order}{volgorde}
\setinterfaceconstant{orientation}{orientatie}
@@ -920,12 +944,14 @@
\setinterfaceconstant{reduction}{reductie}
\setinterfaceconstant{ref}{ref}
\setinterfaceconstant{refcommand}{refcommand}
-\setinterfaceconstant{reference}{verwijzing}
+\setinterfaceconstant{reference}{referentie}
\setinterfaceconstant{referenceprefix}{referenceprefix}
\setinterfaceconstant{referencing}{refereren}
\setinterfaceconstant{region}{gebied}
\setinterfaceconstant{regionin}{gebiedin}
\setinterfaceconstant{regionout}{gebieduit}
+\setinterfaceconstant{regularfeatures}{regularfeatures}
+\setinterfaceconstant{regularfont}{regularfont}
\setinterfaceconstant{repeat}{herhaal}
\setinterfaceconstant{reset}{reset}
\setinterfaceconstant{resetnumber}{resetnummer}
@@ -972,6 +998,8 @@
\setinterfaceconstant{sectionstarter}{sectionstarter}
\setinterfaceconstant{sectionstopper}{sectionstopper}
\setinterfaceconstant{separator}{scheider}
+\setinterfaceconstant{separatorcolor}{scheiderkleur}
+\setinterfaceconstant{separatorstyle}{scheiderletter}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{krimp}
@@ -982,7 +1010,11 @@
\setinterfaceconstant{sidespacebefore}{zijvoorwit}
\setinterfaceconstant{sign}{teken}
\setinterfaceconstant{size}{formaat}
+\setinterfaceconstant{slantedfeatures}{slantedfeatures}
+\setinterfaceconstant{slantedfont}{slantedfont}
\setinterfaceconstant{small}{klein}
+\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures}
+\setinterfaceconstant{smallcapsfont}{smallcapsfont}
\setinterfaceconstant{solution}{oplossing}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sortering}
@@ -1013,6 +1045,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixscheider}
\setinterfaceconstant{suffixstopper}{suffixafsluiter}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1060,7 +1094,7 @@
\setinterfaceconstant{totalnumber}{totalnumber}
\setinterfaceconstant{type}{type}
\setinterfaceconstant{unit}{eenheid}
-\setinterfaceconstant{unknownreference}{onbekendeverwijzing}
+\setinterfaceconstant{unknownreference}{onbekendereferentie}
\setinterfaceconstant{urlalternative}{urlvariant}
\setinterfaceconstant{urlspace}{urlspatie}
\setinterfaceconstant{validate}{valideer}
diff --git a/Master/texmf-dist/tex/context/base/mult-pe.mkii b/Master/texmf-dist/tex/context/base/mult-pe.mkii
index f55a7ab5936..999b16cf5e0 100644
--- a/Master/texmf-dist/tex/context/base/mult-pe.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-pe.mkii
@@ -281,6 +281,7 @@
\setinterfacevariable{marginedge}{لبهحاشیه}
\setinterfacevariable{margintitle}{عنوانحاشیه}
\setinterfacevariable{marking}{نشانهگذاری}
+\setinterfacevariable{math}{math}
\setinterfacevariable{mathalignment}{تنظیمریاضی}
\setinterfacevariable{mathcases}{حالتهایریاضی}
\setinterfacevariable{mathematics}{ریاضی}
@@ -414,12 +415,14 @@
\setinterfacevariable{sectionblockenvironment}{محیطبلوکبخش}
\setinterfacevariable{sectionnumber}{شمارهبخش}
\setinterfacevariable{see}{ببینید}
+\setinterfacevariable{selectfont}{selectfont}
\setinterfacevariable{september}{سپتامبر}
\setinterfacevariable{serif}{سریف}
\setinterfacevariable{serried}{تنگهم}
\setinterfacevariable{setups}{بارگذاریها}
\setinterfacevariable{sheet}{ورقه}
\setinterfacevariable{short}{short}
+\setinterfacevariable{simplefonts}{simplefonts}
\setinterfacevariable{singlesided}{یکرو}
\setinterfacevariable{slanted}{خوابیده}
\setinterfacevariable{slantedbold}{مشکیخوابیده}
@@ -593,6 +596,12 @@
\setinterfaceconstant{blockway}{راهبلوک}
\setinterfaceconstant{bodyfont}{قلمبدنه}
\setinterfaceconstant{boffset}{boffset}
+\setinterfaceconstant{boldfeatures}{boldfeatures}
+\setinterfaceconstant{boldfont}{boldfont}
+\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures}
+\setinterfaceconstant{bolditalicfont}{bolditalicfont}
+\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures}
+\setinterfaceconstant{boldslantedfont}{boldslantedfont}
\setinterfaceconstant{bookmark}{چوبخط}
\setinterfaceconstant{bottom}{پایین}
\setinterfaceconstant{bottomafter}{bottomafter}
@@ -608,6 +617,7 @@
\setinterfaceconstant{calculate}{محاسبه}
\setinterfaceconstant{category}{category}
\setinterfaceconstant{ccommand}{فرمان}
+\setinterfaceconstant{check}{check}
\setinterfaceconstant{click}{فشردن}
\setinterfaceconstant{clickin}{فشردنداخل}
\setinterfaceconstant{clickout}{فشردنخارج}
@@ -630,6 +640,7 @@
\setinterfaceconstant{component}{مولفه}
\setinterfaceconstant{compoundhyphen}{compoundhyphen}
\setinterfaceconstant{compress}{فشردن}
+\setinterfaceconstant{concerns}{concerns}
\setinterfaceconstant{connector}{connector}
\setinterfaceconstant{continue}{ادامه}
\setinterfaceconstant{contrastcolor}{contrastcolor}
@@ -682,6 +693,7 @@
\setinterfaceconstant{factor}{عامل}
\setinterfaceconstant{fallback}{عقبریختن}
\setinterfaceconstant{family}{خانواده}
+\setinterfaceconstant{features}{features}
\setinterfaceconstant{fieldbackgroundcolor}{رنگپسزمینهمیدان}
\setinterfaceconstant{fieldframecolor}{رنگقالبمیدان}
\setinterfaceconstant{fieldlayer}{لایهمیدان}
@@ -711,6 +723,7 @@
\setinterfaceconstant{from}{از}
\setinterfaceconstant{get}{بگیر}
\setinterfaceconstant{global}{سراسری}
+\setinterfaceconstant{goodies}{goodies}
\setinterfaceconstant{grid}{توری}
\setinterfaceconstant{hang}{بیاویز}
\setinterfaceconstant{hcompact}{hcompact}
@@ -747,6 +760,8 @@
\setinterfaceconstant{interaction}{پانل}
\setinterfaceconstant{interlinespace}{فضایبینخط}
\setinterfaceconstant{internalgrid}{internalgrid}
+\setinterfaceconstant{italicfeatures}{italicfeatures}
+\setinterfaceconstant{italicfont}{italicfont}
\setinterfaceconstant{itemalign}{تنظیمآیتم}
\setinterfaceconstant{items}{آیتمها}
\setinterfaceconstant{juniorsep}{juniorsep}
@@ -798,9 +813,12 @@
\setinterfaceconstant{marginedge}{لبهحاشیه}
\setinterfaceconstant{marginedgetext}{متنلبهحاشیه}
\setinterfaceconstant{margintext}{متنحاشیه}
+\setinterfaceconstant{mark}{mark}
\setinterfaceconstant{marking}{نشانهگذاری}
\setinterfaceconstant{marstyle}{سبکحاش}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathclass}{mathclass}
+\setinterfaceconstant{mathlimits}{mathlimits}
\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{بیشترین}
\setinterfaceconstant{maxdepth}{maxdepth}
@@ -818,8 +836,13 @@
\setinterfaceconstant{mindepth}{کمترینعمق}
\setinterfaceconstant{minheight}{کمترینارتفاع}
\setinterfaceconstant{minwidth}{کمترینعرض}
+\setinterfaceconstant{moffset}{moffset}
\setinterfaceconstant{monthconversion}{monthconversion}
\setinterfaceconstant{more}{more}
+\setinterfaceconstant{mpdepth}{mpdepth}
+\setinterfaceconstant{mpheight}{mpheight}
+\setinterfaceconstant{mpoffset}{mpoffset}
+\setinterfaceconstant{mpwidth}{mpwidth}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{نام}
\setinterfaceconstant{namesep}{namesep}
@@ -859,6 +882,7 @@
\setinterfaceconstant{openaction}{عملباز}
\setinterfaceconstant{openpage}{openpage}
\setinterfaceconstant{openpageaction}{عملصفحهباز}
+\setinterfaceconstant{opticalsize}{opticalsize}
\setinterfaceconstant{option}{گزینه}
\setinterfaceconstant{order}{order}
\setinterfaceconstant{orientation}{جهتدهی}
@@ -926,6 +950,8 @@
\setinterfaceconstant{region}{region}
\setinterfaceconstant{regionin}{ناحیهدرون}
\setinterfaceconstant{regionout}{ناحیهبیرون}
+\setinterfaceconstant{regularfeatures}{regularfeatures}
+\setinterfaceconstant{regularfont}{regularfont}
\setinterfaceconstant{repeat}{تکرار}
\setinterfaceconstant{reset}{بازنشانی}
\setinterfaceconstant{resetnumber}{بازنشانیشماره}
@@ -972,6 +998,8 @@
\setinterfaceconstant{sectionstarter}{sectionstarter}
\setinterfaceconstant{sectionstopper}{sectionstopper}
\setinterfaceconstant{separator}{جداکننده}
+\setinterfaceconstant{separatorcolor}{separatorcolor}
+\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{قراربده}
\setinterfaceconstant{setups}{بارگذاریها}
\setinterfaceconstant{shrink}{shrink}
@@ -982,7 +1010,11 @@
\setinterfaceconstant{sidespacebefore}{فضایکناریقبل}
\setinterfaceconstant{sign}{علامت}
\setinterfaceconstant{size}{اندازه}
+\setinterfaceconstant{slantedfeatures}{slantedfeatures}
+\setinterfaceconstant{slantedfont}{slantedfont}
\setinterfaceconstant{small}{کوچک}
+\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures}
+\setinterfaceconstant{smallcapsfont}{smallcapsfont}
\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{ترتیبتایپ}
@@ -1013,6 +1045,8 @@
\setinterfaceconstant{suffix}{پسوند}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/Master/texmf-dist/tex/context/base/mult-prm.lua b/Master/texmf-dist/tex/context/base/mult-prm.lua
index e6fa4abccf3..f0b850a5c26 100644
--- a/Master/texmf-dist/tex/context/base/mult-prm.lua
+++ b/Master/texmf-dist/tex/context/base/mult-prm.lua
@@ -235,6 +235,7 @@ return {
"luatexdatestamp",
"luatexrevision",
"luatexversion",
+ "luafunction",
"mathstyle",
"nokerns",
"noligs",
@@ -573,10 +574,10 @@ return {
"catcodetable",
"char",
"chardef",
- "chardp",
- "charht",
- "charit",
- "charwd",
+--"chardp",
+--"charht",
+--"charit",
+--"charwd",
"cleaders",
"clearmarks",
"closein",
diff --git a/Master/texmf-dist/tex/context/base/mult-ro.mkii b/Master/texmf-dist/tex/context/base/mult-ro.mkii
index 34dd385a37b..f577eabda1f 100644
--- a/Master/texmf-dist/tex/context/base/mult-ro.mkii
+++ b/Master/texmf-dist/tex/context/base/mult-ro.mkii
@@ -281,6 +281,7 @@
\setinterfacevariable{marginedge}{marginebordura}
\setinterfacevariable{margintitle}{titlumarginal}
\setinterfacevariable{marking}{marcaje}
+\setinterfacevariable{math}{math}
\setinterfacevariable{mathalignment}{mathalignment}
\setinterfacevariable{mathcases}{mathcases}
\setinterfacevariable{mathematics}{matematica}
@@ -414,12 +415,14 @@
\setinterfacevariable{sectionblockenvironment}{blocsectiuneambient}
\setinterfacevariable{sectionnumber}{numarsetiune}
\setinterfacevariable{see}{vezi}
+\setinterfacevariable{selectfont}{selectfont}
\setinterfacevariable{september}{septembrie}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{serried}
\setinterfacevariable{setups}{setari}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
+\setinterfacevariable{simplefonts}{simplefonts}
\setinterfacevariable{singlesided}{ofata}
\setinterfacevariable{slanted}{inclinat}
\setinterfacevariable{slantedbold}{inclinataldin}
@@ -593,6 +596,12 @@
\setinterfaceconstant{blockway}{blockway}
\setinterfaceconstant{bodyfont}{fonttext}
\setinterfaceconstant{boffset}{boffset}
+\setinterfaceconstant{boldfeatures}{boldfeatures}
+\setinterfaceconstant{boldfont}{boldfont}
+\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures}
+\setinterfaceconstant{bolditalicfont}{bolditalicfont}
+\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures}
+\setinterfaceconstant{boldslantedfont}{boldslantedfont}
\setinterfaceconstant{bookmark}{semncarte}
\setinterfaceconstant{bottom}{jos}
\setinterfaceconstant{bottomafter}{bottomafter}
@@ -608,6 +617,7 @@
\setinterfaceconstant{calculate}{calculeaza}
\setinterfaceconstant{category}{category}
\setinterfaceconstant{ccommand}{comandac}
+\setinterfaceconstant{check}{check}
\setinterfaceconstant{click}{click}
\setinterfaceconstant{clickin}{clickintru}
\setinterfaceconstant{clickout}{clickies}
@@ -630,6 +640,7 @@
\setinterfaceconstant{component}{component}
\setinterfaceconstant{compoundhyphen}{compoundhyphen}
\setinterfaceconstant{compress}{compress}
+\setinterfaceconstant{concerns}{concerns}
\setinterfaceconstant{connector}{connector}
\setinterfaceconstant{continue}{continua}
\setinterfaceconstant{contrastcolor}{culoarecontrast}
@@ -682,6 +693,7 @@
\setinterfaceconstant{factor}{factor}
\setinterfaceconstant{fallback}{fallback}
\setinterfaceconstant{family}{familie}
+\setinterfaceconstant{features}{features}
\setinterfaceconstant{fieldbackgroundcolor}{culoarefundalcamp}
\setinterfaceconstant{fieldframecolor}{culoareframecamp}
\setinterfaceconstant{fieldlayer}{fieldlayer}
@@ -711,6 +723,7 @@
\setinterfaceconstant{from}{dela}
\setinterfaceconstant{get}{adu}
\setinterfaceconstant{global}{global}
+\setinterfaceconstant{goodies}{goodies}
\setinterfaceconstant{grid}{grid}
\setinterfaceconstant{hang}{suspenda}
\setinterfaceconstant{hcompact}{hcompact}
@@ -747,6 +760,8 @@
\setinterfaceconstant{interaction}{interactiune}
\setinterfaceconstant{interlinespace}{spatiereinterliniara}
\setinterfaceconstant{internalgrid}{internalgrid}
+\setinterfaceconstant{italicfeatures}{italicfeatures}
+\setinterfaceconstant{italicfont}{italicfont}
\setinterfaceconstant{itemalign}{itemalign}
\setinterfaceconstant{items}{elemente}
\setinterfaceconstant{juniorsep}{juniorsep}
@@ -798,9 +813,12 @@
\setinterfaceconstant{marginedge}{coltbordura}
\setinterfaceconstant{marginedgetext}{textcoltbordura}
\setinterfaceconstant{margintext}{textmargine}
+\setinterfaceconstant{mark}{mark}
\setinterfaceconstant{marking}{marcaje}
\setinterfaceconstant{marstyle}{stilmarcaj}
\setinterfaceconstant{mask}{mask}
+\setinterfaceconstant{mathclass}{mathclass}
+\setinterfaceconstant{mathlimits}{mathlimits}
\setinterfaceconstant{mathstyle}{mathstyle}
\setinterfaceconstant{max}{max}
\setinterfaceconstant{maxdepth}{maxdepth}
@@ -818,8 +836,13 @@
\setinterfaceconstant{mindepth}{mindepth}
\setinterfaceconstant{minheight}{inaltimeminima}
\setinterfaceconstant{minwidth}{latimeminima}
+\setinterfaceconstant{moffset}{moffset}
\setinterfaceconstant{monthconversion}{monthconversion}
\setinterfaceconstant{more}{more}
+\setinterfaceconstant{mpdepth}{mpdepth}
+\setinterfaceconstant{mpheight}{mpheight}
+\setinterfaceconstant{mpoffset}{mpoffset}
+\setinterfaceconstant{mpwidth}{mpwidth}
\setinterfaceconstant{n}{n}
\setinterfaceconstant{name}{nume}
\setinterfaceconstant{namesep}{namesep}
@@ -859,6 +882,7 @@
\setinterfaceconstant{openaction}{actiunedeschidere}
\setinterfaceconstant{openpage}{openpage}
\setinterfaceconstant{openpageaction}{actiunedeschiderepagina}
+\setinterfaceconstant{opticalsize}{opticalsize}
\setinterfaceconstant{option}{optiune}
\setinterfaceconstant{order}{order}
\setinterfaceconstant{orientation}{orientation}
@@ -926,6 +950,8 @@
\setinterfaceconstant{region}{region}
\setinterfaceconstant{regionin}{regiuneintrare}
\setinterfaceconstant{regionout}{regiuneiesire}
+\setinterfaceconstant{regularfeatures}{regularfeatures}
+\setinterfaceconstant{regularfont}{regularfont}
\setinterfaceconstant{repeat}{repeta}
\setinterfaceconstant{reset}{reset}
\setinterfaceconstant{resetnumber}{resetnumber}
@@ -972,6 +998,8 @@
\setinterfaceconstant{sectionstarter}{sectionstarter}
\setinterfaceconstant{sectionstopper}{sectionstopper}
\setinterfaceconstant{separator}{separator}
+\setinterfaceconstant{separatorcolor}{separatorcolor}
+\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{set}
\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
@@ -982,7 +1010,11 @@
\setinterfaceconstant{sidespacebefore}{spatiulateralinainte}
\setinterfaceconstant{sign}{semn}
\setinterfaceconstant{size}{dimensiune}
+\setinterfaceconstant{slantedfeatures}{slantedfeatures}
+\setinterfaceconstant{slantedfont}{slantedfont}
\setinterfaceconstant{small}{mic}
+\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures}
+\setinterfaceconstant{smallcapsfont}{smallcapsfont}
\setinterfaceconstant{solution}{solution}
\setinterfaceconstant{sort}{sort}
\setinterfaceconstant{sorttype}{sorttype}
@@ -1013,6 +1045,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/Master/texmf-dist/tex/context/base/mult-sys.mkiv b/Master/texmf-dist/tex/context/base/mult-sys.mkiv
index f0db9fa6708..8c1bff2bc40 100644
--- a/Master/texmf-dist/tex/context/base/mult-sys.mkiv
+++ b/Master/texmf-dist/tex/context/base/mult-sys.mkiv
@@ -56,6 +56,7 @@
\definesystemconstant {latin} \definesystemconstant {la}
\definesystemconstant {lithuanian} \definesystemconstant {lt}
\definesystemconstant {bokmal} \definesystemconstant {nb}
+\definesystemconstant {malayalam} \definesystemconstant {ml}
\definesystemconstant {norwegian} \definesystemconstant {no}
\definesystemconstant {nynorsk} \definesystemconstant {nn}
\definesystemconstant {polish} \definesystemconstant {pl}
@@ -163,6 +164,9 @@
\definesystemconstant {both}
+\definesystemconstant {internal}
+\definesystemconstant {external}
+
\definesystemconstant {attribute}
\definesystemconstant {none}
@@ -558,11 +562,12 @@
 %D calls to other files), old macros, to guarantee compatibility and new macros not
 %D yet present in the format.
-\definefileconstant {errfilename} {cont-err}
-\definefileconstant {sysfilename} {cont-sys}
-\definefileconstant {newfilename} {cont-new}
-\definefileconstant {locfilename} {cont-loc}
-\definefileconstant {expfilename} {cont-exp}
+\definefileconstant {sysfilename} {cont-sys.mkiv}
+\definefileconstant {newfilename} {cont-new.mkiv}
+\definefileconstant {locfilename} {cont-loc.mkiv}
+\definefileconstant {expfilename} {cont-exp.mkiv}
+\definefileconstant {fntfilename} {cont-fnt.mkiv} % not yet used
+\definefileconstant {gdsfilename} {cont-fnt.lfg} % not yet used
%D The setup files for the language, font, color and special subsystems have a common
%D prefix. This means that we have at most three characters for unique filenames.
diff --git a/Master/texmf-dist/tex/context/base/node-acc.lua b/Master/texmf-dist/tex/context/base/node-acc.lua
index 4380ec3a451..59fa031bf70 100644
--- a/Master/texmf-dist/tex/context/base/node-acc.lua
+++ b/Master/texmf-dist/tex/context/base/node-acc.lua
@@ -11,10 +11,27 @@ local nodes, node = nodes, node
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local copy_node = node.copy
-local free_nodelist = node.flush_list
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local getid = nuts.getid
+local getfield = nuts.getfield
+local getattr = nuts.getattr
+local getlist = nuts.getlist
+local getchar = nuts.getchar
+local getnext = nuts.getnext
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local copy_node = nuts.copy
+local free_nodelist = nuts.flush_list
+local insert_after = nuts.insert_after
+
+local new_gluespec = nuts.pool.gluespec -- temp hack
local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
@@ -29,112 +46,133 @@ local threshold = 65536
-- todo: nbsp etc
-- todo: collapse kerns
+-- p_id
+
local function injectspaces(head)
- local p
+ local p, p_id
local n = head
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0)
---~ if n.spec.width > 0 then -- threshold
- if p and p.id == glyph_code then
+ -- if getfield(getfield(n,"spec"),"width") > 0 then -- threshold
+-- if p and p_id == glyph_code then
+ if p and getid(p) == glyph_code then
local g = copy_node(p)
- local c = g.components
+ local c = getfield(g,"components")
if c then -- it happens that we copied a ligature
free_nodelist(c)
- g.components = nil
- g.subtype = 256
+ setfield(g,"components",nil)
+ setfield(g,"subtype",256)
end
- local a = n[a_characters]
- local s = copy_node(n.spec)
- g.char, n.spec = 32, s
- p.next, g.prev = g, p
- g.next, n.prev = n, g
- s.width = s.width - g.width
+ local a = getattr(n,a_characters)
+ -- local s = copy_node(getfield(n,"spec"))
+ -- this will be fixed in luatex but for now a temp hack (zero test)
+ local s = getfield(n,"spec")
+ s = s == 0 and new_gluespec(0) or copy_node(s)
+ --
+ setfield(g,"char",32)
+ setfield(n,"spec",s)
+ -- insert_after(p,p,g)
+ setfield(p,"next",g)
+ setfield(g,"prev",p)
+ setfield(g,"next",n)
+ setfield(n,"prev",g)
+ setfield(s,"width",getfield(s,"width") - getfield(g,"width"))
if a then
- g[a_characters] = a
+ setattr(g,a_characters,a)
end
- s[a_characters] = 0
- n[a_characters] = 0
+ setattr(s,a_characters,0)
+ setattr(n,a_characters,0)
end
---~ end
+ -- end
elseif id == hlist_code or id == vlist_code then
- injectspaces(n.list,attribute)
+ injectspaces(getlist(n),attribute)
-- elseif id == kern_code then -- the backend already collapses
-- local first = n
-- while true do
- -- local nn = n.next
- -- if nn and nn.id == kern_code then
+ -- local nn = getnext(n)
+ -- if nn and getid(nn) == kern_code then
-- -- maybe we should delete kerns but who cares at this stage
- -- first.kern = first.kern + nn.kern
- -- nn.kern = 0
+        --             setfield(first,"kern",getfield(first,"kern") + getfield(nn,"kern"))
+ -- setfield(nn,"kern",0)
-- n = nn
-- else
-- break
-- end
-- end
end
+ p_id = id
p = n
- n = n.next
+ n = getnext(n)
end
- return head, true
+ return head, true -- always done anyway
end
-nodes.handlers.accessibility = injectspaces
+nodes.handlers.accessibility = function(head)
+ local head, done = injectspaces(tonut(head))
+ return tonode(head), done
+end
-- todo:
---~ local a_hyphenated = attributes.private('hyphenated')
---~
---~ local hyphenated, codes = { }, { }
---~
---~ local function compact(n)
---~ local t = { }
---~ for n in traverse_id(glyph_code,n) do
---~ t[#t+1] = utfchar(n.char) -- check for unicode
---~ end
---~ return concat(t,"")
---~ end
---~
---~ local function injectspans(head)
---~ for n in traverse_nodes(head) do
---~ local id = n.id
---~ if id == disc then
---~ local r, p = n.replace, n.pre
---~ if r and p then
---~ local str = compact(r)
---~ local hsh = hyphenated[str]
---~ if not hsh then
---~ hsh = #codes + 1
---~ hyphenated[str] = hsh
---~ codes[hsh] = str
---~ end
---~ n[a_hyphenated] = hsh
---~ end
---~ elseif id == hlist_code or id == vlist_code then
---~ injectspans(n.list)
---~ end
---~ end
---~ return head, true
---~ end
---~
---~ nodes.injectspans = injectspans
---~
---~ tasks.appendaction("processors", "words", "nodes.injectspans")
---~
---~ local function injectspans(head)
---~ for n in traverse_nodes(head) do
---~ local id = n.id
---~ if id == disc then
---~ local a = n[a_hyphenated]
---~ if a then
---~ local str = codes[a]
---~ local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
---~ local e = new_pdfliteral("EMC")
---~ node.insert_before(head,n,b)
---~ node.insert_after(head,n,e)
---~ end
---~ elseif id == hlist_code or id == vlist_code then
---~ injectspans(n.list)
---~ end
---~ end
---~ end
+-- local a_hyphenated = attributes.private('hyphenated')
+--
+-- local hyphenated, codes = { }, { }
+--
+-- local function compact(n)
+-- local t = { }
+-- for n in traverse_id(glyph_code,n) do
+-- t[#t+1] = utfchar(getchar(n)) -- check for unicode
+-- end
+-- return concat(t,"")
+-- end
+--
+-- local function injectspans(head)
+-- local done = false
+-- for n in traverse_nodes(tonuts(head)) do
+-- local id = getid(n)
+-- if id == disc then
+-- local r = getfield(n,"replace")
+-- local p = getfield(n,"pre")
+-- if r and p then
+-- local str = compact(r)
+-- local hsh = hyphenated[str]
+-- if not hsh then
+-- hsh = #codes + 1
+-- hyphenated[str] = hsh
+-- codes[hsh] = str
+-- end
+-- setattr(n,a_hyphenated,hsh)
+-- done = true
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- injectspans(getlist(n))
+-- end
+-- end
+-- return tonodes(head), done
+-- end
+--
+-- nodes.injectspans = injectspans
+--
+-- tasks.appendaction("processors", "words", "nodes.injectspans")
+--
+-- local function injectspans(head)
+-- local done = false
+-- for n in traverse_nodes(tonut(head)) do
+-- local id = getid(n)
+-- if id == disc then
+-- local a = getattr(n,a_hyphenated)
+-- if a then
+-- local str = codes[a]
+-- local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
+-- local e = new_pdfliteral("EMC")
+-- insert_before(head,n,b)
+-- insert_after(head,n,e)
+-- done = true
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- injectspans(getlist(n))
+-- end
+-- end
+-- return tonodes(head), done
+-- end
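
The node-acc.lua rewrite above follows the boundary pattern used throughout this update: the worker (injectspaces) runs entirely on nuts through getid, getnext and setfield, and only the exported handler converts with tonut and tonode. What follows is a minimal sketch of that wrapper pattern, assuming a ConTeXt MkIV run; the handler name countglyphs and its counting body are illustrative only and not part of the patch.

local nuts       = nodes.nuts
local tonut      = nuts.tonut
local tonode     = nuts.tonode
local getid      = nuts.getid
local getnext    = nuts.getnext
local glyph_code = nodes.nodecodes.glyph

-- worker: walks a nut list and reports whether it saw any glyphs
local function countglyphs(head)
    local n, count = head, 0
    while n do
        if getid(n) == glyph_code then
            count = count + 1
        end
        n = getnext(n)
    end
    return head, count > 0
end

-- exported handler: convert at the boundary, as nodes.handlers.accessibility does above
nodes.handlers.countglyphs = function(head)
    local head, done = countglyphs(tonut(head))
    return tonode(head), done
end

The same shape returns below for nodes.handlers.backgrounds and nodes.handlers.alignbackgrounds in node-bck.lua.
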
diff --git a/Master/texmf-dist/tex/context/base/node-aux.lua b/Master/texmf-dist/tex/context/base/node-aux.lua
index e3fc7ad6fe8..49911625822 100644
--- a/Master/texmf-dist/tex/context/base/node-aux.lua
+++ b/Master/texmf-dist/tex/context/base/node-aux.lua
@@ -22,82 +22,153 @@ local vlist_code = nodecodes.vlist
local attributelist_code = nodecodes.attributelist -- temporary
local math_code = nodecodes.math
-local nodepool = nodes.pool
-
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local vianuts = nuts.vianuts
+
+local getbox = nuts.getbox
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local free_node = nuts.free
+local hpack_nodes = nuts.hpack
+local unset_attribute = nuts.unset_attribute
+local first_glyph = nuts.first_glyph
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_tail = nuts.tail
+local insert_node_after = nuts.insert_after
+local isnode = nuts.is_node
+local getbox = nuts.getbox
+
+local nodes_traverse_id = nodes.traverse_id
+local nodes_first_glyph = nodes.first_glyph
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_glyph = nodepool.glyph
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local free_node = node.free
-local hpack_nodes = node.hpack
-local unset_attribute = node.unset_attribute
-local first_glyph = node.first_glyph or node.first_character
-local copy_node = node.copy
-local copy_node_list = node.copy_list
-local slide_nodes = node.slide
-local insert_node_after = node.insert_after
-local isnode = node.is_node
-
local unsetvalue = attributes.unsetvalue
local current_font = font.current
-local texbox = tex.box
+local texsetbox = tex.setbox
local report_error = logs.reporter("node-aux:error")
-function nodes.repackhlist(list,...)
---~ nodes.showsimplelist(list)
+-- At some point we figured that copying before using was the safest bet
+-- when dealing with boxes at the tex end. This is because tex also needs
+-- to manage the grouping (i.e. savestack). However, there is an easy
+-- solution that keeps the tex end happy as tex.setbox deals with this. The
+-- overhead of one temporary list node is negligible.
+--
+-- function tex.takebox(id)
+-- local box = tex.getbox(id)
+-- if box then
+-- local copy = node.copy(box)
+-- local list = box.list
+-- copy.list = list
+-- box.list = nil
+-- tex.setbox(id,nil)
+-- return copy
+-- end
+-- end
+
+local function takebox(id)
+ local box = getbox(id)
+ if box then
+ local copy = copy_node(box)
+ local list = getlist(box)
+ setfield(copy,"list",list)
+ setfield(box,"list",nil)
+ texsetbox(id,nil)
+ return copy
+ end
+end
+
+function nodes.takebox(id)
+ local b = takebox(id)
+ if b then
+ return tonode(b)
+ end
+end
+
+nuts.takebox = takebox
+tex.takebox = nodes.takebox -- sometimes more clear
+
+-- so far
+
+local function repackhlist(list,...)
local temp, b = hpack_nodes(list,...)
- list = temp.list
- temp.list = nil
+ list = getlist(temp)
+ setfield(temp,"list",nil)
free_node(temp)
return list, b
end
+nuts.repackhlist = repackhlist
+
+function nodes.repackhlist(list,...)
+ local list, b = repackhlist(tonut(list),...)
+ return tonode(list), b
+end
+
local function set_attributes(head,attr,value)
for n in traverse_nodes(head) do
- n[attr] = value
- local id = n.id
+ setattr(n,attr,value)
+ local id = getid(n)
if id == hlist_node or id == vlist_node then
- set_attributes(n.list,attr,value)
+ set_attributes(getlist(n),attr,value)
end
end
end
local function set_unset_attributes(head,attr,value)
for n in traverse_nodes(head) do
- if not n[attr] then
- n[attr] = value
+ if not getattr(n,attr) then
+ setattr(n,attr,value)
end
- local id = n.id
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- set_unset_attributes(n.list,attr,value)
+ set_unset_attributes(getlist(n),attr,value)
end
end
end
local function unset_attributes(head,attr)
for n in traverse_nodes(head) do
- n[attr] = unsetvalue
- local id = n.id
+ setattr(n,attr,unsetvalue)
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- unset_attributes(n.list,attr)
+ unset_attributes(getlist(n),attr)
end
end
end
-nodes.setattribute = node.set_attribute
-nodes.getattribute = node.has_attribute
-nodes.unsetattribute = node.unset_attribute
-nodes.has_attribute = node.has_attribute
+-- for old times' sake
-nodes.firstglyph = first_glyph
-nodes.setattributes = set_attributes
-nodes.setunsetattributes = set_unset_attributes
-nodes.unsetattributes = unset_attributes
+nuts.setattribute = nuts.setattr nodes.setattribute = nodes.setattr
+nuts.getattribute = nuts.getattr nodes.getattribute = nodes.getattr
+nuts.unsetattribute = nuts.unset_attribute nodes.unsetattribute = nodes.unset_attribute
+nuts.has_attribute = nuts.has_attribute nodes.has_attribute = nodes.has_attribute
+nuts.firstglyph = nuts.first_glyph nodes.firstglyph = nodes.first_glyph
+nuts.setattributes = set_attributes nodes.setattributes = vianuts(set_attributes)
+nuts.setunsetattributes = set_unset_attributes nodes.setunsetattributes = vianuts(set_unset_attributes)
+nuts.unsetattributes = unset_attributes nodes.unsetattributes = vianuts(unset_attributes)
+
+-- history:
+--
-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
-- return (
-- id ~= glyph_node
@@ -106,29 +177,26 @@ nodes.unsetattributes = unset_attributes
-- or id == adjust_node
-- or id == penalty_node
-- or (id == glue_node and a.spec.writable)
--- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil)
--- or (id == math_node and a.surround == 0)
--- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL))
--- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil)
--- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node)
+-- or (id == disc_node and getfield(a,"pre") == nil and getfield(a,"post") == nil and getfield(a,"replace") == nil)
+-- or (id == math_node and getfield(a,"surround") == 0)
+-- or (id == kern_node and (getfield(a,"kern") == 0 or getsubtype(subtype) == NORMAL))
+-- or (id == hlist_node and getfield(a,"width") == 0 and getfield(a,"height") == 0 and getfield(a,"depth") == 0 and getlist(a) == nil)
+-- or (id == whatsit_node and getsubtype(a) ~= pdf_refximage_node and getsubtype(a) ~= pdf_refxform_node)
-- )
-- end
-
--- history:
---
--
-- local function glyph_width(a)
--- local ch = chardata[a.font][a.char]
+-- local ch = chardata[getfont(a)][getchar(a)]
-- return (ch and ch.width) or 0
-- end
--
-- local function glyph_total(a)
--- local ch = chardata[a.font][a.char]
+-- local ch = chardata[getfont(a)][getchar(a)]
-- return (ch and (ch.height+ch.depth)) or 0
-- end
--
-- local function non_discardable(a) -- inline
--- return a.id < math_node -- brrrr
+-- return getid(id) < math_node -- brrrr
-- end
--
-- local function calculate_badness(t,s)
@@ -183,8 +251,36 @@ nodes.unsetattributes = unset_attributes
-- return -u
-- end
-- end
+--
+-- if not node.end_of_math then
+-- function node.end_of_math(n)
+-- for n in traverse_id(math_code,getnext(next)) do
+-- return n
+-- end
+-- end
+-- end
+--
+-- nodes.endofmath = node.end_of_math
+--
+-- local function firstline(n)
+-- while n do
+-- local id = getid(n)
+-- if id == hlist_code then
+-- if getsubtype(n) == line_code then
+-- return n
+-- else
+-- return firstline(getlist(n))
+-- end
+-- elseif id == vlist_code then
+-- return firstline(getlist(n))
+-- end
+-- n = getnext(n)
+-- end
+-- end
+--
+-- nodes.firstline = firstline
-function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
+function nuts.firstcharacter(n,untagged) -- tagged == subtype > 255
if untagged then
return first_glyph(n)
else
@@ -194,44 +290,38 @@ function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
end
end
-function nodes.firstcharinbox(n)
- local l = texbox[n].list
+-- function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
+-- if untagged then
+-- return nodes_first_glyph(n)
+-- else
+-- for g in nodes_traverse_id(glyph_code,n) do
+-- return g
+-- end
+-- end
+-- end
+
+local function firstcharinbox(n)
+ local l = getlist(getbox(n))
if l then
for g in traverse_id(glyph_code,l) do
- return g.char
+ return getchar(g)
end
end
return 0
end
-if not node.end_of_math then
- function node.end_of_math(n)
- for n in traverse_id(math_code,n.next) do
- return n
- end
+nuts .firstcharinbox = firstcharinbox
+nodes.firstcharinbox = firstcharinbox
+nodes.firstcharacter = vianuts(firstcharacter)
+
+function commands.buildtextaccent(n)
+ local char = firstcharinbox(n)
+ if char > 0 then
+ -- context.accent(false,char)
+ context([[\accent%s\relax]],char)
end
end
-nodes.endofmath = node.end_of_math
-
--- local function firstline(n)
--- while n do
--- local id = n.id
--- if id == hlist_code then
--- if n.subtype == line_code then
--- return n
--- else
--- return firstline(n.list)
--- end
--- elseif id == vlist_code then
--- return firstline(n.list)
--- end
--- n = n.next
--- end
--- end
-
--- nodes.firstline = firstline
-
-- this depends on fonts, so we have a funny dependency ... will be
-- sorted out .. we could make tonodes a plugin into this
@@ -242,10 +332,8 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
local head, tail, space, fnt, template = nil, nil, nil, nil, nil
if not fnt then
fnt = current_font()
- elseif type(fnt) ~= "number" and fnt.id == "glyph" then
- fnt, template = nil, fnt
- -- else
- -- already a number
+ elseif type(fnt) ~= "number" and getid(fnt) == glyph_code then -- so it has to be a real node
+ fnt, template = nil, tonut(fnt)
end
for s in utfvalues(str) do
local n
@@ -259,12 +347,12 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
end
elseif template then
n = copy_node(template)
- n.char = s
+ setvalue(n,"char",s)
else
n = new_glyph(fnt,s)
end
if attr then -- normally false when template
- n.attr = copy_node_list(attr)
+ setfield(n,"attr",copy_node_list(attr))
end
if head then
insert_node_after(head,tail,n)
@@ -276,69 +364,130 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
return head, tail
end
-nodes.tonodes = tonodes
+nuts.tonodes = tonodes
+
+nodes.tonodes = function(str,fnt,attr)
+ local head, tail = tonodes(str,fnt,attr)
+ return tonode(head), tonode(tail)
+end
+
+-- local function link(list,currentfont,currentattr,head,tail)
+-- for i=1,#list do
+-- local n = list[i]
+-- if n then
+-- local tn = isnode(n)
+-- if not tn then
+-- local tn = type(n)
+-- if tn == "number" then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- local h, t = tonodes(tostring(n),currentfont,currentattr)
+-- if not h then
+-- -- skip
+-- elseif not head then
+-- head = h
+-- tail = t
+-- else
+-- setfield(tail,"next",h)
+-- setfield(h,"prev",t)
+-- tail = t
+-- end
+-- elseif tn == "string" then
+-- if #tn > 0 then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- local h, t = tonodes(n,currentfont,currentattr)
+-- if not h then
+-- -- skip
+-- elseif not head then
+-- head, tail = h, t
+-- else
+-- setfield(tail,"next",h)
+-- setfield(h,"prev",t)
+-- tail = t
+-- end
+-- end
+-- elseif tn == "table" then
+-- if #tn > 0 then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- head, tail = link(n,currentfont,currentattr,head,tail)
+-- end
+-- end
+-- elseif not head then
+-- head = n
+-- tail = find_tail(n)
+-- elseif getid(n) == attributelist_code then
+-- -- weird case
+-- report_error("weird node type in list at index %s:",i)
+-- for i=1,#list do
+-- local l = list[i]
+-- report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
+-- end
+-- os.exit()
+-- else
+-- setfield(tail,"next",n)
+-- setfield(n,"prev",tail)
+-- if getnext(n) then
+-- tail = find_tail(n)
+-- else
+-- tail = n
+-- end
+-- end
+-- else
+-- -- permitting nil is convenient
+-- end
+-- end
+-- return head, tail
+-- end
-local function link(list,currentfont,currentattr,head,tail)
+local function link(list,currentfont,currentattr,head,tail) -- an oldie, might be replaced
for i=1,#list do
local n = list[i]
if n then
- local tn = isnode(n)
- if not tn then
- local tn = type(n)
- if tn == "number" then
+ local tn = type(n)
+ if tn == "string" then
+ if #tn > 0 then
if not currentfont then
currentfont = current_font()
end
- local h, t = tonodes(tostring(n),currentfont,currentattr)
+ local h, t = tonodes(n,currentfont,currentattr)
if not h then
-- skip
elseif not head then
head, tail = h, t
else
- tail.next, h.prev, tail = h, t, t
- end
- elseif tn == "string" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- local h, t = tonodes(n,currentfont,currentattr)
- if not h then
- -- skip
- elseif not head then
- head, tail = h, t
- else
- tail.next, h.prev, tail = h, t, t
- end
+ setfield(tail,"next",h)
+ setfield(h,"prev",t)
+ tail = t
end
- elseif tn == "table" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- head, tail = link(n,currentfont,currentattr,head,tail)
+ end
+ elseif tn == "table" then
+ if #tn > 0 then
+ if not currentfont then
+ currentfont = current_font()
end
+ head, tail = link(n,currentfont,currentattr,head,tail)
end
elseif not head then
head = n
- if n.next then
- tail = slide_nodes(n)
- else
- tail = n
- end
- elseif n.id == attributelist_code then
+ tail = find_tail(n)
+ elseif getid(n) == attributelist_code then
-- weird case
report_error("weird node type in list at index %s:",i)
for i=1,#list do
local l = list[i]
- report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
+ report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
end
os.exit()
else
- tail.next = n
- n.prev = tail
- if n.next then
- tail = slide_nodes(n)
+ setfield(tail,"next",n)
+ setfield(n,"prev",tail)
+ if getnext(n) then
+ tail = find_tail(n)
else
tail = n
end
@@ -350,17 +499,22 @@ local function link(list,currentfont,currentattr,head,tail)
return head, tail
end
-nodes.link = link
+nuts.link = link
+
+nodes.link = function(list,currentfont,currentattr,head,tail)
+ local head, tail = link(list,currentfont,currentattr,tonut(head),tonut(tail))
+ return tonode(head), tonode(tail)
+end
local function locate(start,wantedid,wantedsubtype)
for n in traverse_nodes(start) do
- local id = n.id
+ local id = getid(n)
if id == wantedid then
- if not wantedsubtype or n.subtype == wantedsubtype then
+ if not wantedsubtype or getsubtype(n) == wantedsubtype then
return n
end
elseif id == hlist_code or id == vlist_code then
- local found = locate(n.list,wantedid,wantedsubtype)
+ local found = locate(getlist(n),wantedid,wantedsubtype)
if found then
return found
end
@@ -368,22 +522,30 @@ local function locate(start,wantedid,wantedsubtype)
end
end
-nodes.locate = locate
+nuts.locate = locate
-function nodes.concat(list)
- local head, tail
- for i=1,#list do
- local li = list[i]
- if not li then
- -- skip
- elseif head then
- tail.next = li
- li.prev = tail
- tail = li.next and slide_nodes(li) or li
- else
- head = li
- tail = li.next and slide_nodes(li) or li
- end
- end
- return head, tail
+nodes.locate = function(start,wantedid,wantedsubtype)
+ local found = locate(tonut(start),wantedid,wantedsubtype)
+ return found and tonode(found)
end
+
+-- I have no use for this yet:
+--
+-- \skip0=10pt plus 2pt minus 2pt
+-- \cldcontext{"\letterpercent p",tex.stretch_amount(tex.skip[0],1000)} -- 14.30887pt
+--
+-- local gluespec_code = nodes.nodecodes.gluespec
+--
+-- function tex.badness_to_ratio(badness)
+-- return (badness/100)^(1/3)
+-- end
+--
+-- function tex.stretch_amount(skip,badness) -- node no nut
+-- if skip.id == gluespec_code then
+-- return skip.width + (badness and (badness/100)^(1/3) or 1) * skip.stretch
+-- else
+-- return 0
+-- end
+-- end
+
+
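
The new takebox above transfers ownership of a box register without copying its contents: only the wrapper node is copied, its list pointer is moved over, and tex.setbox(id,nil) voids the register so the grouping on the TeX side stays consistent. A hedged usage sketch follows, assuming a ConTeXt MkIV run; the register number 0, the reporter name and the width report are illustrative assumptions, not part of the patch.

-- assumes e.g. \setbox0=\hbox{some text} was done at the TeX end
local nuts     = nodes.nuts
local tonut    = nuts.tonut
local getfield = nuts.getfield

local report = logs.reporter("takebox example")

local box = nodes.takebox(0)    -- wrapper node only; register 0 is now void
if box then
    local b = tonut(box)
    -- the copied wrapper keeps the dimensions of the original hbox
    report("width in scaled points: %s",getfield(b,"width"))
end
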
diff --git a/Master/texmf-dist/tex/context/base/node-bck.lua b/Master/texmf-dist/tex/context/base/node-bck.lua
index feaa2c6849d..4b7b4a0643d 100644
--- a/Master/texmf-dist/tex/context/base/node-bck.lua
+++ b/Master/texmf-dist/tex/context/base/node-bck.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['node-bck'] = {
local attributes, nodes, node = attributes, nodes, node
+local tasks = nodes.tasks
+
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -19,11 +21,25 @@ local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
local cell_code = listcodes.cell
-local traverse = node.traverse
-local traverse_id = node.traverse_id
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+
+local traverse = nuts.traverse
+local traverse_id = nuts.traverse_id
-local nodepool = nodes.pool
-local tasks = nodes.tasks
local new_rule = nodepool.rule
local new_glue = nodepool.glue
@@ -37,50 +53,50 @@ local a_alignbackground = attributes.private('alignbackground')
local function add_backgrounds(head) -- rather old code .. to be redone
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local list = current.list
+ local list = getlist(current)
if list then
local head = add_backgrounds(list)
if head then
- current.list = head
+ setfield(current,"list",head)
list = head
end
end
- local width = current.width
+ local width = getfield(current,"width")
if width > 0 then
- local background = current[a_background]
+ local background = getattr(current,a_background)
if background then
-- direct to hbox
-- colorspace is already set so we can omit that and stick to color
- local mode = current[a_colorspace]
+ local mode = getattr(current,a_colorspace)
if mode then
- local height = current.height
- local depth = current.depth
+ local height = getfield(current,"height")
+ local depth = getfield(current,"depth")
local skip = id == hlist_code and width or (height + depth)
local glue = new_glue(-skip)
local rule = new_rule(width,height,depth)
- local color = current[a_color]
- local transparency = current[a_transparency]
- rule[a_colorspace] = mode
+ local color = getattr(current,a_color)
+ local transparency = getattr(current,a_transparency)
+ setattr(rule,a_colorspace,mode)
if color then
- rule[a_color] = color
+ setattr(rule,a_color,color)
end
if transparency then
- rule[a_transparency] = transparency
+ setattr(rule,a_transparency,transparency)
end
- rule.next = glue
- glue.prev = rule
+ setfield(rule,"next",glue)
+ setfield(glue,"prev",rule)
if list then
- glue.next = list
- list.prev = glue
+ setfield(glue,"next",list)
+ setfield(list,"prev",glue)
end
- current.list = rule
+ setfield(current,"list",rule)
end
end
end
end
- current = current.next
+ current = getnext(current)
end
return head, true
end
@@ -88,16 +104,16 @@ end
local function add_alignbackgrounds(head)
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- local list = current.list
+ local list = getlist(current)
if not list then
-- no need to look
- elseif current.subtype == cell_code then
+ elseif getsubtype(current) == cell_code then
local background = nil
local found = nil
-- for l in traverse(list) do
- -- background = l[a_alignbackground]
+ -- background = getattr(l,a_alignbackground)
-- if background then
-- found = l
-- break
@@ -106,7 +122,7 @@ local function add_alignbackgrounds(head)
-- we know that it's a fake hlist (could be user node)
-- but we cannot store tables in user nodes yet
for l in traverse_id(hpack_code,list) do
- background = l[a_alignbackground]
+ background = getattr(l,a_alignbackground)
if background then
found = l
end
@@ -115,28 +131,28 @@ local function add_alignbackgrounds(head)
--
if background then
-- current has subtype 5 (cell)
- local width = current.width
+ local width = getfield(current,"width")
if width > 0 then
- local mode = found[a_colorspace]
+ local mode = getattr(found,a_colorspace)
if mode then
local glue = new_glue(-width)
- local rule = new_rule(width,current.height,current.depth)
- local color = found[a_color]
- local transparency = found[a_transparency]
- rule[a_colorspace] = mode
+ local rule = new_rule(width,getfield(current,"height"),getfield(current,"depth"))
+ local color = getattr(found,a_color)
+ local transparency = getattr(found,a_transparency)
+ setattr(rule,a_colorspace,mode)
if color then
- rule[a_color] = color
+ setattr(rule,a_color,color)
end
if transparency then
- rule[a_transparency] = transparency
+ setattr(rule,a_transparency,transparency)
end
- rule.next = glue
- glue.prev = rule
+ setfield(rule,"next",glue)
+ setfield(glue,"prev",rule)
if list then
- glue.next = list
- list.prev = glue
+ setfield(glue,"next",list)
+ setfield(list,"prev",glue)
end
- current.list = rule
+ setfield(current,"list",rule)
end
end
end
@@ -144,18 +160,23 @@ local function add_alignbackgrounds(head)
add_alignbackgrounds(list)
end
elseif id == vlist_code then
- local list = current.list
+ local list = getlist(current)
if list then
add_alignbackgrounds(list)
end
end
- current = current.next
+ current = getnext(current)
end
return head, true
end
-nodes.handlers.backgrounds = add_backgrounds
-nodes.handlers.alignbackgrounds = add_alignbackgrounds
+-- nodes.handlers.backgrounds = add_backgrounds
+-- nodes.handlers.alignbackgrounds = add_alignbackgrounds
+
+nodes.handlers.backgrounds = function(head) local head, done = add_backgrounds (tonut(head)) return tonode(head), done end
+nodes.handlers.alignbackgrounds = function(head) local head, done = add_alignbackgrounds(tonut(head)) return tonode(head), done end
+
+-- elsewhere: needs checking
tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
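
Both background handlers above splice a rule followed by a compensating negative glue in front of a box's list, doing the next/prev surgery directly on nuts. The sketch below isolates that splice for the horizontal case, assuming a ConTeXt MkIV run; the helper name prepend_background is an assumption, while the nodepool constructors and setfield calls are the ones imported in the patch.

local nuts     = nodes.nuts
local nodepool = nuts.pool
local setfield = nuts.setfield
local new_rule = nodepool.rule
local new_glue = nodepool.glue

local function prepend_background(list,width,height,depth)
    local rule = new_rule(width,height,depth)   -- the background bar
    local glue = new_glue(-width)               -- step back so the content overprints the rule
    setfield(rule,"next",glue)
    setfield(glue,"prev",rule)
    if list then
        setfield(glue,"next",list)
        setfield(list,"prev",glue)
    end
    return rule                                 -- new head of the (nut) list
end

In add_backgrounds this new head then replaces the box content via setfield(current,"list",rule), after the colorspace, color and transparency attributes have been copied onto the rule.
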
diff --git a/Master/texmf-dist/tex/context/base/node-fin.lua b/Master/texmf-dist/tex/context/base/node-fin.lua
index 2e62ebcb5aa..250035f3958 100644
--- a/Master/texmf-dist/tex/context/base/node-fin.lua
+++ b/Master/texmf-dist/tex/context/base/node-fin.lua
@@ -8,126 +8,63 @@ if not modules then modules = { } end modules ['node-fin'] = {
-- this module is being reconstructed
-- local functions, only slightly slower
+--
+-- leaders are also triggers
local next, type, format = next, type, string.format
local attributes, nodes, node = attributes, nodes, node
-local copy_node = node.copy
-local find_tail = node.slide
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local pdfliteral_code = whatcodes.pdfliteral
-
-local states = attributes.states
-local numbers = attributes.numbers
-local a_trigger = attributes.private('trigger')
-local triggering = false
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local loadstripped = utilities.lua.loadstripped
-local unsetvalue = attributes.unsetvalue
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getleader = nuts.getleader
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local copy_node = nuts.copy
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local pdfliteral_code = whatcodes.pdfliteral
+
+local states = attributes.states
+local numbers = attributes.numbers
+local a_trigger = attributes.private('trigger')
+local triggering = false
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local loadstripped = utilities.lua.loadstripped
+local unsetvalue = attributes.unsetvalue
-- these two will be like trackers
-function states.enabletriggering()
- triggering = true
-end
-function states.disabletriggering()
- triggering = false
-end
-
--- the following code is no longer needed due to the new backend
--- but we keep it around for a while as an example
---
--- states.collected = states.collected or { }
---
--- storage.register("states/collected", states.collected, "states.collected")
---
--- local collected = states.collected
---
--- function states.collect(str)
--- collected[#collected+1] = str
--- end
---
--- function states.flush()
--- if #collected > 0 then
--- for i=1,#collected do
--- context(collected[i]) -- we're in context mode anyway
--- end
--- collected = { }
--- states.collected = collected
--- end
--- end
---
--- function states.check()
--- logs.report("states",concat(collected,"\n"))
--- end
-
--- we used to do the main processor loop here and call processor for each node
--- but eventually this was too much a slow down (1 sec on 23 for 120 pages mk)
--- so that we moved looping to the processor itself; this may lead to a bit of
--- duplicate code once that we have more state handlers
-
--- local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer
--- local namespace = plugin.namespace
--- if namespace.enabled ~= false then -- this test will go away
--- starttiming(attributes) -- in principle we could delegate this to the main caller
--- local done, used, ok = false, nil, false
--- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute
--- local processor = plugin.processor
--- if processor then
--- local initializer = plugin.initializer
--- local resolver = plugin.resolver
--- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
--- if initializer then
--- initializer(namespace,attribute,head)
--- end
--- head, ok = processor(namespace,attribute,head,inheritance)
--- if ok then
--- local finalizer = plugin.finalizer
--- if finalizer then
--- head, ok, used = finalizer(namespace,attribute,head)
--- if used then
--- local flusher = plugin.flusher
--- if flusher then
--- head = flusher(namespace,attribute,head,used)
--- end
--- end
--- end
--- done = true
--- end
--- end
--- stoptiming(attributes)
--- return head, done
--- else
--- return head, false
--- end
--- end
---
--- function nodes.installattributehandler(plugin) -- we need to avoid this nested function
--- return function(head)
--- return process_attribute(head,plugin)
--- end
--- end
-
--- An experiment: lean and mean functions. It is not really faster but
--- with upcoming functionality it might make a difference, e.g. features
--- like 'casing' and 'italics' can be called a lot so there it makes sense.
+function states.enabletriggering () triggering = true end
+function states.disabletriggering() triggering = false end
nodes.plugindata = nil
+-- inheritance: -0x7FFFFFFF -- we can best use nil and skip !
+
local template = [[
local plugin = nodes.plugindata
local starttiming = statistics.starttiming
@@ -146,8 +83,10 @@ if not processor then
elseif initializer or finalizer or resolver then
return function(head)
starttiming(attributes)
- local done, used, ok = false, nil, false
- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
+ local done, used, ok, inheritance = false, nil, false, nil
+ if resolver then
+ inheritance = resolver()
+ end
if initializer then
initializer(namespace,attribute,head)
end
@@ -180,10 +119,13 @@ function nodes.installattributehandler(plugin)
return loadstripped(template)()
end
--- the injectors
+-- for the moment:
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local function copied(n)
+ return copy_node(tonut(n))
+end
+
+-- the injectors
local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
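
installattributehandler above compiles the template chunk once per plugin (loadstripped(template)()), so each plugin gets its own specialised handler with its processor, initializer, finalizer and resolver already bound. The plain-Lua sketch below illustrates that compile-a-specialised-function idea; it is only an analogy, and the varargs plumbing differs from the nodes.plugindata mechanism the patch actually uses.

-- plain Lua 5.2+ (load accepts a string chunk); names are illustrative
local template = [[
local plugin = ...
return function(x)
    -- plugin.offset is baked into this closure when the chunk runs
    return plugin.offset + x
end
]]

local function installhandler(plugin)
    return load(template)(plugin)   -- compile once, specialise for this plugin
end

local handler = installhandler { offset = 10 }
print(handler(32))                  -- prints 42
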
@@ -210,326 +152,25 @@ end
function states.finalize(namespace,attribute,head) -- is this one ok?
if current > 0 and nsnone then
- local id = head.id
+ head = tonut(head)
+ local id = getid(head)
if id == hlist_code or id == vlist_code then
- local list = head.list
+ local list = getlist(head)
if list then
- head.list = insert_node_before(list,list,copy_node(nsnone))
+ list = insert_node_before(list,list,copied(nsnone)) -- two return values
+ setfield(head,"list",list)
end
else
- head = insert_node_before(head,head,copy_node(nsnone))
+ head = insert_node_before(head,head,copied(nsnone))
end
- return head, true, true
+ return tonode(head), true, true
end
return head, false, false
end
-- disc nodes can be ignored
-- we need to deal with literals too (reset as well as oval)
--- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
-
--- local function process(namespace,attribute,head,inheritance,default) -- one attribute
--- local stack, done = head, false
--- while stack do
--- local id = stack.id
--- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- -- here ? compare selective
--- if id == glue_code then --leader
--- -- same as *list
--- local content = stack.leader
--- if content then
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- current = savedcurrent
--- done = done or ok
--- end
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- done = done or ok
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
--- local function process(namespace,attribute,head,inheritance,default) -- one attribute
--- local stack, done = head, false
-
--- local function check()
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- return c
--- end
-
--- local function nested(content)
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- return process(namespace,attribute,content,inheritance,outer)
--- else
--- return process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- return process(namespace,attribute,content,inheritance,default)
--- end
--- end
-
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = stack.leader
--- if content and check() then
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
-
--- local ok = false
--- stack.leader, ok = nested(content)
--- done = done or ok
-
--- current = savedcurrent
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
-
--- local ok = false
--- stack.list, ok = nested(content)
--- done = done or ok
-
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- check()
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
--- local function process(namespace,attribute,head,inheritance,default) -- one attribute
--- local stack, done = head, false
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- -- begin of check
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- -- end of check
--- elseif id == glue_code then
--- local content = stack.leader
--- if content then
--- -- begin of check
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- -- begin special to this check
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
--- -- begin nested --
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- -- end nested --
--- done = done or ok
--- current = savedcurrent
--- -- end special to this check
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- -- end of check
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- -- begin nested --
--- local ok
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- -- end nested --
--- done = done or ok
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- -- begin of check
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- -- end of check
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
+-- if id == glyph_code or (id == whatsit_code and getsubtype(stack) == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
local function process(namespace,attribute,head,inheritance,default) -- one attribute
local stack = head
@@ -537,53 +178,57 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
local check = false
local leader = nil
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
-- begin nested --
- local ok
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+ local list, ok = process(namespace,attribute,content,inheritance,outer)
+ setfield(stack,"list",list)
+ done = done or ok
else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
-- end nested --
- done = done or ok
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
-- much faster this way than using a check() and nested() function
if check then
- local c = stack[attribute]
+ local c = getattr(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ head = insert_node_before(head,stack,copied(nsdata[default]))
current = default
done = true
end
elseif current ~= c then
- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+ head = insert_node_before(head,stack,copied(nsdata[c]))
current = c
done = true
end
if leader then
local savedcurrent = current
- local ci = leader.id
+ local ci = getid(leader)
if ci == hlist_code or ci == vlist_code then
-- else we reset inside a box unneeded, okay, the downside is
-- that we trigger color in each repeated box, so there is room
@@ -591,41 +236,48 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
current = 0
end
-- begin nested --
- local ok = false
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
+ local list, ok = process(namespace,attribute,leader,inheritance,outer)
+ setfield(stack,"leader",list)
+ done = done or ok
else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
-- end nested --
- done = done or ok
current = savedcurrent
leader = false
end
elseif default and inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ head = insert_node_before(head,stack,copied(nsdata[default]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
current = 0
done = true
end
check = false
end
- stack = stack.next
+ stack = getnext(stack)
end
return head, done
end
-states.process = process
+states.process = function(namespace,attribute,head,default)
+ local head, done = process(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
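All of the reworked states entry points follow the same boundary discipline: convert the incoming head to a nut once with tonut, let the worker walk direct nodes, and convert back with tonode just before the list is handed back to TeX. A minimal sketch of that wrapping, assuming ConTeXt's nodes.nuts layer is loaded; wrap and worker are illustrative names, not part of the file:

    local nuts   = nodes.nuts
    local tonut  = nuts.tonut
    local tonode = nuts.tonode

    -- worker is any list processor with the (namespace,attribute,head,default)
    -- calling convention used below, operating on nuts and returning head, done
    local function wrap(worker)
        return function(namespace,attribute,head,default)
            local h, done = worker(namespace,attribute,tonut(head),default)
            return tonode(h), done
        end
    end

    -- hypothetical usage: states.process = wrap(process)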
-- we can force a selector, e.g. document wide color spaces, saves a little
-- watch out, we need to check both the selector state (like colorspace) and
@@ -633,277 +285,109 @@ states.process = process
-- state changes while the main state stays the same (like two glyphs following
-- each other with the same color but different color spaces e.g. \showcolor)
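Put differently: a literal has to be injected as soon as either the main value or the selector differs from what is currently active; testing only one of the two would miss, say, a colorspace switch between two glyphs that share the same color. A tiny hedged sketch of just that decision in plain Lua, with made-up values rather than the real nsdata lookups:

    local current, current_selector = 0, 0

    local function needs_new_literal(c,s)
        if c ~= current or s ~= current_selector then
            current, current_selector = c, s
            return true      -- caller injects copied(nsdata[c][s]) before the node
        end
        return false         -- same value and same selector: nothing to do
    end

    print(needs_new_literal(2,1)) -- true : new value
    print(needs_new_literal(2,2)) -- true : same value, different selector
    print(needs_new_literal(2,2)) -- false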
--- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
--- local stack, done = head, false
--- while stack do
--- local id = stack.id
--- -- we need to deal with literals too (reset as well as oval)
--- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
--- if id == glyph_code -- or id == disc_code
--- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- else
--- local s = stack[nsselector]
--- if current ~= c or current_selector ~= s then
--- local data = nsdata[c]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = c
--- current_selector = s
--- done = true
--- end
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current, current_selector, done = 0, 0, true
--- end
--- if id == glue_code then -- leader
--- -- same as *list
--- local content = stack.leader
--- if content then
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer)
--- else
--- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- current = savedcurrent
--- done = done or ok
--- end
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
--- else
--- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- done = done or ok
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
--- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
--- local stack, done = head, false
-
--- local function check()
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- else
--- local s = stack[nsselector]
--- if current ~= c or current_selector ~= s then
--- local data = nsdata[c]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = c
--- current_selector = s
--- done = true
--- end
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current, current_selector, done = 0, 0, true
--- end
--- return c
--- end
-
--- local function nested(content)
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- return selective(namespace,attribute,content,inheritance,outer)
--- else
--- return selective(namespace,attribute,content,inheritance,default)
--- end
--- else
--- return selective(namespace,attribute,content,inheritance,default)
--- end
--- end
-
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = stack.leader
--- if content and check() then
--- -- local savedcurrent = current
--- -- local ci = content.id
--- -- if ci == hlist_code or ci == vlist_code then
--- -- -- else we reset inside a box unneeded, okay, the downside is
--- -- -- that we trigger color in each repeated box, so there is room
--- -- -- for improvement here
--- -- current = 0
--- -- end
-
--- local ok = false
--- stack.leader, ok = nested(content)
--- done = done or ok
-
--- -- current = savedcurrent
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
-
--- local ok = false
--- stack.list, ok = nested(content)
--- done = done or ok
-
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- check()
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
local function selective(namespace,attribute,head,inheritance,default) -- two attributes
local stack = head
local done = false
local check = false
local leader = nil
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
- local ok = false
-- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+ local list, ok = selective(namespace,attribute,content,inheritance,outer)
+ setfield(stack,"list",list)
+ done = done or ok
else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
-- end nested
- done = done or ok
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
if check then
- local c = stack[attribute]
+ local c = getattr(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
done = true
end
else
- local s = stack[nsselector]
+ local s = getattr(stack,nsselector)
if current ~= c or current_selector ~= s then
local data = nsdata[c]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = c
current_selector = s
done = true
end
end
if leader then
- local ok = false
-- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+                        local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
+ local list, ok = selective(namespace,attribute,leader,inheritance,outer)
+ setfield(stack,"leader",list)
+ done = done or ok
else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
-- end nested
- done = done or ok
- leader = false
+ leader = false
end
elseif default and inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
current, current_selector, done = 0, 0, true
end
check = false
end
-
- stack = stack.next
+ stack = getnext(stack)
end
return head, done
end
-states.selective = selective
+states.selective = function(namespace,attribute,head,default)
+ local head, done = selective(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- Ideally the next one should be merged with the previous but keeping it separate is
-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
@@ -914,77 +398,6 @@ states.selective = selective
-- Todo: make a better stacker. Keep track (in attribute) about nesting level. Not
-- entirely trivial and a generic solution is nicer (compares to the exporter).
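The better stacker hinted at in the todo would track nesting explicitly, so begin and end literals always come out balanced instead of being reconstructed from done flags. A rough hedged sketch of that idea only, not an implementation of it:

    -- an explicit stack of active values; enter/leave tell the caller when a
    -- begin or end literal is actually needed
    local active = { }

    local function enter(value)
        active[#active+1] = value
        return value ~= active[#active-1]   -- inject only on a real change
    end

    local function leave()
        local value = active[#active]
        active[#active] = nil
        return value ~= active[#active]     -- matching end literal when needed
    end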
--- local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
--- local stack, done = head, false
--- local current, depth = default or 0, 0
---
--- local function check()
--- local a = stack[attribute]
--- if a then
--- if current ~= a then
--- head = insert_node_before(head,stack,copy_node(nsdata[a]))
--- depth = depth + 1
--- current, done = a, true
--- end
--- elseif default > 0 then
--- --
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- depth = depth - 1
--- current, done = 0, true
--- end
--- return a
--- end
---
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = stack.leader
--- if content and check() then
--- local ok = false
--- stack.leader, ok = stacked(namespace,attribute,content,current)
--- done = done or ok
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- -- the problem is that broken lines gets the attribute which can be a later one
--- if nslistwise then
--- local a = stack[attribute]
--- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
--- local p = current
--- current, done = a, true
--- head = insert_node_before(head,stack,copy_node(nsdata[a]))
--- stack.list = stacked(namespace,attribute,content,current)
--- head, stack = insert_node_after(head,stack,copy_node(nsnone))
--- current = p
--- else
--- local ok = false
--- stack.list, ok = stacked(namespace,attribute,content,current)
--- done = done or ok
--- end
--- else
--- local ok = false
--- stack.list, ok = stacked(namespace,attribute,content,current)
--- done = done or ok
--- end
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- check()
--- end
--- end
--- stack = stack.next
--- end
--- while depth > 0 do
--- head = insert_node_after(head,stack,copy_node(nsnone))
--- depth = depth - 1
--- end
--- return head, done
--- end
-
local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
local stack = head
local done = false
@@ -993,149 +406,83 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
-- the problem is that broken lines gets the attribute which can be a later one
if nslistwise then
- local a = stack[attribute]
+ local a = getattr(stack,attribute)
if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
local p = current
- current, done = a, true
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
- stack.list = stacked(namespace,attribute,content,current)
- head, stack = insert_node_after(head,stack,copy_node(nsnone))
+ current = a
+ head = insert_node_before(head,stack,copied(nsdata[a]))
+ local list = stacked(namespace,attribute,content,current) -- two return values
+ setfield(stack,"list",list)
+ done = true
+ head, stack = insert_node_after(head,stack,copied(nsnone))
current = p
else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,content,current)
+ setfield(stack,"list",list) -- only if ok
done = done or ok
end
else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,content,current)
+ setfield(stack,"list",list) -- only if ok
done = done or ok
end
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
if check then
- local a = stack[attribute]
+ local a = getattr(stack,attribute)
if a then
if current ~= a then
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ head = insert_node_before(head,stack,copied(nsdata[a]))
depth = depth + 1
current, done = a, true
end
if leader then
- local ok = false
- stack.leader, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,content,current)
+ setfield(stack,"leader",list) -- only if ok
done = done or ok
leader = false
end
elseif default > 0 then
--
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
depth = depth - 1
current, done = 0, true
end
check = false
end
-
- stack = stack.next
+ stack = getnext(stack)
end
while depth > 0 do
- head = insert_node_after(head,stack,copy_node(nsnone))
+ head = insert_node_after(head,stack,copied(nsnone))
depth = depth - 1
end
return head, done
end
-states.stacked = stacked
+states.stacked = function(namespace,attribute,head,default)
+ local head, done = stacked(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- experimental
--- local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
--- nsbegin()
--- local current, previous, done, okay = head, head, false, false
--- local attrib = default or unsetvalue
---
--- local function check()
--- local a = current[attribute] or unsetvalue
--- if a ~= attrib then
--- local n = nsstep(a)
--- if n then
--- -- !!!! TEST CODE !!!!
--- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
--- head = insert_node_before(head,current,n) -- a
--- end
--- attrib, done, okay = a, true, true
--- end
--- return a
--- end
---
--- while current do
--- local id = current.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = current.leader
--- if content and check() then
--- -- tricky as a leader has to be a list so we cannot inject before
--- local _, ok = stacker(namespace,attribute,content,attrib)
--- done = done or ok
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = current.list
--- if not content then
--- -- skip
--- elseif nslistwise then
--- local a = current[attribute]
--- if a and attrib ~= a and nslistwise[a] then -- viewerlayer
--- done = true
--- head = insert_node_before(head,current,copy_node(nsdata[a]))
--- current.list = stacker(namespace,attribute,content,a)
--- head, current = insert_node_after(head,current,copy_node(nsnone))
--- else
--- local ok = false
--- current.list, ok = stacker(namespace,attribute,content,attrib)
--- done = done or ok
--- end
--- else
--- local ok = false
--- current.list, ok = stacker(namespace,attribute,content,default)
--- done = done or ok
--- end
--- elseif id == rule_code then
--- if current.width ~= 0 then
--- check()
--- end
--- end
--- previous = current
--- current = current.next
--- end
--- if okay then
--- local n = nsend()
--- if n then
--- -- !!!! TEST CODE !!!!
--- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
--- head = insert_node_after(head,previous,n)
--- end
--- end
--- return head, done
--- end
-
local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
nsbegin()
local current = head
@@ -1146,52 +493,53 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = current.leader
+ leader = getleader(current)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = current.list
+ local content = getlist(current)
if not content then
-- skip
elseif nslistwise then
- local a = current[attribute]
+ local a = getattr(current,attribute)
if a and attrib ~= a and nslistwise[a] then -- viewerlayer
+ head = insert_node_before(head,current,copied(nsdata[a]))
+ local list = stacker(namespace,attribute,content,a)
+ setfield(current,"list",list)
done = true
- head = insert_node_before(head,current,copy_node(nsdata[a]))
- current.list = stacker(namespace,attribute,content,a)
- head, current = insert_node_after(head,current,copy_node(nsnone))
+ head, current = insert_node_after(head,current,copied(nsnone))
else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,attrib)
+ local list, ok = stacker(namespace,attribute,content,attrib)
+ setfield(current,"list",list)
done = done or ok
end
else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,default)
+ local list, ok = stacker(namespace,attribute,content,default)
+ setfield(current,"list",list)
done = done or ok
end
elseif id == rule_code then
- check = current.width ~= 0
+ check = getfield(current,"width") ~= 0
end
if check then
- local a = current[attribute] or unsetvalue
+ local a = getattr(current,attribute) or unsetvalue
if a ~= attrib then
local n = nsstep(a)
if n then
-- !!!! TEST CODE !!!!
- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
- head = insert_node_before(head,current,n) -- a
+ -- head = insert_node_before(head,current,copied(nsdata[tonumber(n)])) -- a
+ head = insert_node_before(head,current,tonut(n)) -- a
end
attrib, done, okay = a, true, true
if leader then
-- tricky as a leader has to be a list so we cannot inject before
- local _, ok = stacker(namespace,attribute,leader,attrib)
+ local list, ok = stacker(namespace,attribute,leader,attrib)
done = done or ok
leader = false
end
@@ -1200,20 +548,23 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
end
previous = current
- current = current.next
+ current = getnext(current)
end
if okay then
local n = nsend()
if n then
-- !!!! TEST CODE !!!!
- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
- head = insert_node_after(head,previous,n)
+ -- head = insert_node_after(head,previous,copied(nsdata[tostring(n)]))
+ head = insert_node_after(head,previous,tonut(n))
end
end
return head, done
end
-states.stacker = stacker
+states.stacker = function(namespace,attribute,head,default)
+ local head, done = stacker(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- -- --
diff --git a/Master/texmf-dist/tex/context/base/node-fin.mkiv b/Master/texmf-dist/tex/context/base/node-fin.mkiv
index 09bac6c087d..2eb033fc16a 100644
--- a/Master/texmf-dist/tex/context/base/node-fin.mkiv
+++ b/Master/texmf-dist/tex/context/base/node-fin.mkiv
@@ -23,8 +23,12 @@
% we might have two variants at some point (efficiency)
-\def\finalizeobjectbox #1{\ctxcommand{finalizebox(\number#1)}}
-\def\finalizeshipoutbox#1{\ctxcommand{finalizebox(\number#1)}}
+\unexpanded\def\finalizeobjectbox #1{\ctxcommand{finalizebox(\number#1)}}
+\unexpanded\def\finalizeshipoutbox#1{\ctxcommand{finalizebox(\number#1)}}
+
+% Experimental (for Aditya):
+
+\unexpanded\def\cleanupbox#1{\ctxcommand{cleanupbox(\number#1)}}
% Tricky stuff: this might become obsolete.
diff --git a/Master/texmf-dist/tex/context/base/node-fnt.lua b/Master/texmf-dist/tex/context/base/node-fnt.lua
index 54359117e81..7000c4fd7ac 100644
--- a/Master/texmf-dist/tex/context/base/node-fnt.lua
+++ b/Master/texmf-dist/tex/context/base/node-fnt.lua
@@ -23,13 +23,26 @@ local fontdata = fonthashes.identifiers
local otf = fonts.handlers.otf
-local traverse_id = node.traverse_id
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
+
local nodecodes = nodes.nodecodes
local handlers = nodes.handlers
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getattr = nuts.getattr
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getnext = nuts.getnext
+
+local traverse_id = nuts.traverse_id
+
local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
local setmetatableindex = table.setmetatableindex
@@ -48,12 +61,31 @@ local run = 0
local setfontdynamics = { }
local fontprocesses = { }
+-- setmetatableindex(setfontdynamics, function(t,font)
+-- local tfmdata = fontdata[font]
+-- local shared = tfmdata.shared
+-- local v = shared and shared.dynamics and otf.setdynamics or false
+-- t[font] = v
+-- return v
+-- end)
+
setmetatableindex(setfontdynamics, function(t,font)
local tfmdata = fontdata[font]
local shared = tfmdata.shared
- local v = shared and shared.dynamics and otf.setdynamics or false
- t[font] = v
- return v
+ local f = shared and shared.dynamics and otf.setdynamics or false
+ if f then
+ local v = { }
+ t[font] = v
+ setmetatableindex(v,function(t,k)
+ local v = f(font,k)
+ t[k] = v
+ return v
+ end)
+ return v
+ else
+ t[font] = false
+ return false
+ end
end)
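The rewritten setfontdynamics entry above replaces a repeated setdynamics(font,attr) call by a per-font, per-attribute lazy cache: the outer index handler creates a table per font, the inner one computes and stores the value per dynamic attribute on first access. A standalone sketch of the same two-level pattern in plain Lua, where compute stands in for the real otf.setdynamics call:

    local function compute(font,attr)        -- stand-in for otf.setdynamics(font,attr)
        return { font = font, attr = attr }
    end

    local cache = setmetatable({ }, { __index = function(t,font)
        local perfont = setmetatable({ }, { __index = function(tt,attr)
            local v = compute(font,attr)     -- computed once per (font,attr) pair
            rawset(tt,attr,v)
            return v
        end })
        rawset(t,font,perfont)
        return perfont
    end })

    print(cache[12][3] == cache[12][3])      -- true: the second lookup is a plain read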
setmetatableindex(fontprocesses, function(t,font)
@@ -72,33 +104,42 @@ end)
fonts.hashes.setdynamics = setfontdynamics
fonts.hashes.processes = fontprocesses
+-- if we forget about basemode we don't need to test too much here and we can consider running
+-- over sub-ranges .. this involves a bit more initializations but who cares .. in that case we
+-- also need to use the stop criterium (we already use head too) ... we cannot use traverse
+-- then, so i'll test it on some local clone first ... the only pitfall is changed directions
+-- inside a run which means that we need to keep track of this which in turn complicates matters
+-- in a way i don't like
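A rough sketch of that sub-range idea: collect maximal runs of consecutive glyphs that share the same font and dynamic attribute, so a later pass could process each run against one stop node. This is hedged exploration rather than code from the file; fontruns is a hypothetical helper that reuses the accessors declared above and deliberately ignores the direction-change pitfall the comment warns about:

    local function fontruns(head)
        local runs = { }
        local run  = nil
        local n    = head
        while n do
            if getid(n) == glyph_code then
                local f = getfont(n)
                local a = getattr(n,0) or 0
                if run and run.font == f and run.attr == a then
                    run.stop = n                 -- extend the current run
                else
                    run = { start = n, stop = n, font = f, attr = a }
                    runs[#runs+1] = run
                end
            else
                run = nil                        -- any non-glyph ends the run
            end
            n = getnext(n)
        end
        return runs
    end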
+
function handlers.characters(head)
-- either next or not, but definitely no already processed list
starttiming(nodes)
- local usedfonts, attrfonts, done = { }, { }, false
- local a, u, prevfont, prevattr = 0, 0, nil, 0
+ local usedfonts, attrfonts = { }, { }
+ local a, u, prevfont, prevattr, done = 0, 0, nil, 0, false
if trace_fontrun then
run = run + 1
report_fonts()
report_fonts("checking node list, run %s",run)
report_fonts()
- local n = head
+ local n = tonut(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local font = n.font
- local attr = n[0] or 0
- report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char))
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0
+ report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,getchar(n))
+ elseif id == disc_code then
+ report_fonts("[disc] %s",nodes.listtoutf(n,true,false,n))
else
- report_fonts("[%s]",nodecodes[n.id])
+ report_fonts("[%s]",nodecodes[id])
end
- n = n.next
+ n = getnext(n)
end
end
- for n in traverse_id(glyph_code,head) do
- -- if n.subtype<256 then -- all are 1
- local font = n.font
- local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
+ for n in traverse_id(glyph_code,tonut(head)) do
+ -- if getsubtype(n) <256 then -- all are 1
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
if font ~= prevfont or attr ~= prevattr then
if attr > 0 then
local used = attrfonts[font]
@@ -107,15 +148,10 @@ function handlers.characters(head)
attrfonts[font] = used
end
if not used[attr] then
- local sd = setfontdynamics[font]
- if sd then -- always true ?
- local d = sd(font,attr) -- can we cache this one?
- if d then
- used[attr] = d
- a = a + 1
- else
- -- can't happen ... otherwise best use nil/false distinction
- end
+ local fd = setfontdynamics[font]
+ if fd then
+ used[attr] = fd[attr]
+ a = a + 1
end
end
else
@@ -125,9 +161,7 @@ function handlers.characters(head)
if fp then
usedfonts[font] = fp
u = u + 1
- else
- -- can't happen ... otherwise best use nil/false distinction
- end
+ end
end
end
prevfont = font
@@ -141,34 +175,25 @@ function handlers.characters(head)
report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
report_fonts()
end
+ -- in context we always have at least 2 processors
if u == 0 then
-- skip
elseif u == 1 then
local font, processors = next(usedfonts)
- local n = #processors
- if n > 0 then
- local h, d = processors[1](head,font,0)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,0)
- head = h or head
- done = done or d
- end
+ for i=1,#processors do
+ local h, d = processors[i](head,font,0)
+ if d then
+ head = h or head
+ done = true
end
end
else
for font, processors in next, usedfonts do
- local n = #processors
- local h, d = processors[1](head,font,0)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,0)
+ for i=1,#processors do
+ local h, d = processors[i](head,font,0)
+ if d then
head = h or head
- done = done or d
+ done = true
end
end
end
@@ -178,38 +203,22 @@ function handlers.characters(head)
elseif a == 1 then
local font, dynamics = next(attrfonts)
for attribute, processors in next, dynamics do -- attr can switch in between
- local n = #processors
- if n == 0 then
- report_fonts("no processors associated with dynamic %s",attribute)
- else
- local h, d = processors[1](head,font,attribute)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,attribute)
- head = h or head
- done = done or d
- end
+ for i=1,#processors do
+ local h, d = processors[i](head,font,attribute)
+ if d then
+ head = h or head
+ done = true
end
end
end
else
for font, dynamics in next, attrfonts do
for attribute, processors in next, dynamics do -- attr can switch in between
- local n = #processors
- if n == 0 then
- report_fonts("no processors associated with dynamic %s",attribute)
- else
- local h, d = processors[1](head,font,attribute)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,attribute)
- head = h or head
- done = done or d
- end
+ for i=1,#processors do
+ local h, d = processors[i](head,font,attribute)
+ if d then
+ head = h or head
+ done = true
end
end
end
@@ -222,5 +231,180 @@ function handlers.characters(head)
return head, true
end
-handlers.protectglyphs = node.protect_glyphs
-handlers.unprotectglyphs = node.unprotect_glyphs
+-- local formatters = string.formatters
+
+-- local function make(processors,font,attribute)
+-- _G.__temp = processors
+-- local t = { }
+-- for i=1,#processors do
+-- if processors[i] then
+-- t[#t+1] = formatters["local p_%s = _G.__temp[%s]"](i,i)
+-- end
+-- end
+-- t[#t+1] = "return function(head,done)"
+-- if #processors == 1 then
+-- t[#t+1] = formatters["return p_%s(head,%s,%s)"](1,font,attribute or 0)
+-- else
+-- for i=1,#processors do
+-- if processors[i] then
+-- t[#t+1] = formatters["local h,d=p_%s(head,%s,%s) if d then head=h or head done=true end"](i,font,attribute or 0)
+-- end
+-- end
+-- t[#t+1] = "return head, done"
+-- end
+-- t[#t+1] = "end"
+-- t = concat(t,"\n")
+-- t = load(t)(processors)
+-- _G.__temp = nil
+-- return t
+-- end
+
+-- setmetatableindex(fontprocesses, function(t,font)
+-- local tfmdata = fontdata[font]
+-- local shared = tfmdata.shared -- we need to check shared, only when same features
+-- local processes = shared and shared.processes
+-- if processes and #processes > 0 then
+-- processes = make(processes,font,0)
+-- t[font] = processes
+-- return processes
+-- else
+-- t[font] = false
+-- return false
+-- end
+-- end)
+
+-- setmetatableindex(setfontdynamics, function(t,font)
+-- local tfmdata = fontdata[font]
+-- local shared = tfmdata.shared
+-- local f = shared and shared.dynamics and otf.setdynamics or false
+-- if f then
+-- local v = { }
+-- t[font] = v
+-- setmetatableindex(v,function(t,k)
+-- local v = f(font,k)
+-- v = make(v,font,k)
+-- t[k] = v
+-- return v
+-- end)
+-- return v
+-- else
+-- t[font] = false
+-- return false
+-- end
+-- end)
+
+-- function handlers.characters(head)
+-- -- either next or not, but definitely no already processed list
+-- starttiming(nodes)
+-- local usedfonts, attrfonts
+-- local a, u, prevfont, prevattr, done = 0, 0, nil, 0, false
+-- if trace_fontrun then
+-- run = run + 1
+-- report_fonts()
+-- report_fonts("checking node list, run %s",run)
+-- report_fonts()
+-- local n = head
+-- while n do
+-- local id = n.id
+-- if id == glyph_code then
+-- local font = n.font
+-- local attr = n[0] or 0
+-- report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char))
+-- else
+-- report_fonts("[%s]",nodecodes[n.id])
+-- end
+-- n = n.next
+-- end
+-- end
+-- for n in traverse_id(glyph_code,head) do
+-- -- if n.subtype<256 then -- all are 1
+-- local font = n.font
+-- local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
+-- if font ~= prevfont or attr ~= prevattr then
+-- if attr > 0 then
+-- if not attrfonts then
+-- attrfonts = {
+-- [font] = {
+-- [attr] = setfontdynamics[font][attr]
+-- }
+-- }
+-- a = 1
+-- else
+-- local used = attrfonts[font]
+-- if not used then
+-- attrfonts[font] = {
+-- [attr] = setfontdynamics[font][attr]
+-- }
+-- a = a + 1
+-- elseif not used[attr] then
+-- used[attr] = setfontdynamics[font][attr]
+-- a = a + 1
+-- end
+-- end
+-- else
+-- if not usedfonts then
+-- local fp = fontprocesses[font]
+-- if fp then
+-- usedfonts = {
+-- [font] = fp
+-- }
+-- u = 1
+-- end
+-- else
+-- local used = usedfonts[font]
+-- if not used then
+-- local fp = fontprocesses[font]
+-- if fp then
+-- usedfonts[font] = fp
+-- u = u + 1
+-- end
+-- end
+-- end
+-- end
+-- prevfont = font
+-- prevattr = attr
+-- end
+-- -- end
+-- end
+-- if trace_fontrun then
+-- report_fonts()
+-- report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none")
+-- report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
+-- report_fonts()
+-- end
+-- if not usedfonts then
+-- -- skip
+-- elseif u == 1 then
+-- local font, processors = next(usedfonts)
+-- head, done = processors(head,done)
+-- else
+-- for font, processors in next, usedfonts do
+-- head, done = processors(head,done)
+-- end
+-- end
+-- if not attrfonts then
+-- -- skip
+-- elseif a == 1 then
+-- local font, dynamics = next(attrfonts)
+-- for attribute, processors in next, dynamics do
+-- head, done = processors(head,done)
+-- end
+-- else
+-- for font, dynamics in next, attrfonts do
+-- for attribute, processors in next, dynamics do
+-- head, done = processors(head,done)
+-- end
+-- end
+-- end
+-- stoptiming(nodes)
+-- if trace_characters then
+-- nodes.report(head,done)
+-- end
+-- return head, true
+-- end
+
+local d_protect_glyphs = nuts.protect_glyphs
+local d_unprotect_glyphs = nuts.unprotect_glyphs
+
+handlers.protectglyphs = function(n) return d_protect_glyphs (tonut(n)) end
+handlers.unprotectglyphs = function(n) return d_unprotect_glyphs(tonut(n)) end
diff --git a/Master/texmf-dist/tex/context/base/node-ini.lua b/Master/texmf-dist/tex/context/base/node-ini.lua
index 5a3986c3a4b..a9ef305c03f 100644
--- a/Master/texmf-dist/tex/context/base/node-ini.lua
+++ b/Master/texmf-dist/tex/context/base/node-ini.lua
@@ -13,13 +13,10 @@ modules.
-- this module is being reconstructed
-local next, type = next, type
-local format, match, gsub = string.format, string.match, string.gsub
+local next, type, tostring = next, type, tostring
+local gsub = string.gsub
local concat, remove = table.concat, table.remove
-local sortedhash, sortedkeys, swapped, tohash = table.sortedhash, table.sortedkeys, table.swapped, table.tohash
-local utfchar = utf.char
-local lpegmatch = lpeg.match
-local formatcolumns = utilities.formatters.formatcolumns
+local sortedhash, sortedkeys, swapped = table.sortedhash, table.sortedkeys, table.swapped
--[[ldx--
Access to nodes is what gives its power. Here we
@@ -54,20 +51,12 @@ into the engine, but this is a not so natural extension.
also ignore the empty nodes. [This is obsolete!]
--ldx]]--
-local traverse = node.traverse
-local traverse_id = node.traverse_id
-local free_node = node.free
-local remove_node = node.remove
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local node_fields = node.fields
-
-local allocate = utilities.storage.allocate
+nodes = nodes or { }
+local nodes = nodes
+nodes.handlers = nodes.handlers or { }
-nodes = nodes or { }
-local nodes = nodes
-
-nodes.handlers = nodes.handlers or { }
+local allocate = utilities.storage.allocate
+local formatcolumns = utilities.formatters.formatcolumns
-- there will be more of this:
@@ -103,7 +92,7 @@ local penaltycodes = allocate { -- unfortunately not used
table.setmetatableindex(penaltycodes,function(t,k) return "userpenalty" end) -- not used anyway
-local noadcodes = allocate {
+local noadcodes = allocate { -- simple nodes
[ 0] = "ord",
[ 1] = "opdisplaylimits",
[ 2] = "oplimits",
@@ -170,6 +159,20 @@ local disccodes = allocate {
[5] = "second", -- hard second item
}
+local accentcodes = allocate {
+ [0] = "bothflexible",
+ [1] = "fixedtop",
+ [2] = "fixedbottom",
+ [3] = "fixedboth",
+}
+
+local fencecodes = allocate {
+ [0] = "unset",
+ [1] = "left",
+ [2] = "middle",
+ [3] = "right",
+}
+
local function simplified(t)
local r = { }
for k, v in next, t do
@@ -193,6 +196,8 @@ mathcodes = allocate(swapped(mathcodes,mathcodes))
fillcodes = allocate(swapped(fillcodes,fillcodes))
margincodes = allocate(swapped(margincodes,margincodes))
disccodes = allocate(swapped(disccodes,disccodes))
+accentcodes = allocate(swapped(accentcodes,accentcodes))
+fencecodes = allocate(swapped(fencecodes,fencecodes))
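The allocate(swapped(t,t)) lines above make each code table usable in both directions: indexed by number, as the engine reports subtypes, and by name, as the Lua code refers to them. A tiny self-contained illustration in plain Lua; this swapped is a local stand-in with the same observable effect as ConTeXt's table.swapped, not the library function itself:

    local function swapped(t,s)   -- keep s as-is, add value -> key pairs from t
        local n = { }
        if s then
            for k, v in pairs(s) do n[k] = v end
        end
        for k, v in pairs(t) do n[v] = k end
        return n
    end

    local t = { [0] = "bothflexible", [1] = "fixedtop", [2] = "fixedbottom", [3] = "fixedboth" }
    local accentcodes = swapped(t,t)

    print(accentcodes[1], accentcodes.fixedtop)  -- fixedtop   1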
nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official
nodes.noadcodes = noadcodes
@@ -206,6 +211,8 @@ nodes.mathcodes = mathcodes
nodes.fillcodes = fillcodes
nodes.margincodes = margincodes
nodes.disccodes = disccodes nodes.discretionarycodes = disccodes
+nodes.accentcodes = accentcodes
+nodes.fencecodes = fencecodes
listcodes.row = listcodes.alignment
listcodes.column = listcodes.alignment
@@ -213,6 +220,8 @@ listcodes.column = listcodes.alignment
kerncodes.italiccorrection = kerncodes.userkern
kerncodes.kerning = kerncodes.fontkern
+whatcodes.textdir = whatcodes.dir
+
nodes.codes = allocate { -- mostly for listing
glue = skipcodes,
noad = noadcodes,
@@ -227,6 +236,8 @@ nodes.codes = allocate { -- mostly for listing
margin = margincodes,
disc = disccodes,
whatsit = whatcodes,
+ accent = accentcodes,
+ fence = fencecodes,
}
local report_codes = logs.reporter("nodes","codes")
@@ -248,174 +259,4 @@ function nodes.showcodes()
end
end
-local whatsit_node = nodecodes.whatsit
-
-local messyhack = tohash { -- temporary solution
- nodecodes.attributelist,
- nodecodes.attribute,
- nodecodes.gluespec,
- nodecodes.action,
-}
-
-function nodes.fields(n)
- local id = n.id
- if id == whatsit_node then
- return node_fields(id,n.subtype)
- else
- local t = node_fields(id)
- if messyhack[id] then
- for i=1,#t do
- if t[i] == "subtype" then
- remove(t,i)
- break
- end
- end
- end
- return t
- end
-end
-
trackers.register("system.showcodes", nodes.showcodes)
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-
--- if t.id == glue_code then
--- local s = t.spec
--- print(t)
--- print(s,s and s.writable)
--- if s and s.writable then
--- free_node(s)
--- end
--- t.spec = nil
--- end
-
-local function remove(head, current, free_too)
- local t = current
- head, current = remove_node(head,current)
- if t then
- if free_too then
- free_node(t)
- t = nil
- else
- t.next = nil
- t.prev = nil
- end
- end
- return head, current, t
-end
-
-nodes.remove = remove
-
-function nodes.delete(head,current)
- return remove(head,current,true)
-end
-
-nodes.before = insert_node_before
-nodes.after = insert_node_after
-
--- we need to test this, as it might be fixed now
-
-function nodes.before(h,c,n)
- if c then
- if c == h then
- n.next = h
- n.prev = nil
- h.prev = n
- else
- local cp = c.prev
- n.next = c
- n.prev = cp
- if cp then
- cp.next = n
- end
- c.prev = n
- return h, n
- end
- end
- return n, n
-end
-
-function nodes.after(h,c,n)
- if c then
- local cn = c.next
- if cn then
- n.next = cn
- cn.prev = n
- else
- n.next = nil
- end
- c.next = n
- n.prev = c
- return h, n
- end
- return n, n
-end
-
--- local h, c = nodes.replace(head,current,new)
--- local c = nodes.replace(false,current,new)
--- local c = nodes.replace(current,new)
-
-function nodes.replace(head,current,new) -- no head returned if false
- if not new then
- head, current, new = false, head, current
- end
- local prev, next = current.prev, current.next
- if next then
- new.next = next
- next.prev = new
- end
- if prev then
- new.prev = prev
- prev.next = new
- end
- if head then
- if head == current then
- head = new
- end
- free_node(current)
- return head, new
- else
- free_node(current)
- return new
- end
-end
-
--- will move
-
-local function count(stack,flat)
- local n = 0
- while stack do
- local id = stack.id
- if not flat and id == hlist_code or id == vlist_code then
- local list = stack.list
- if list then
- n = n + 1 + count(list) -- self counts too
- else
- n = n + 1
- end
- else
- n = n + 1
- end
- stack = stack.next
- end
- return n
-end
-
-nodes.count = count
-
-local left, space = lpeg.P("<"), lpeg.P(" ")
-
-local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0)
-
-function nodes.reference(n)
- return lpegmatch(reference,tostring(n))
-end
-
-if not node.next then
-
- function node.next(n) return n and n.next end
- function node.prev(n) return n and n.prev end
-
-end
diff --git a/Master/texmf-dist/tex/context/base/node-ini.mkiv b/Master/texmf-dist/tex/context/base/node-ini.mkiv
index 39d48a00a96..5fc519069db 100644
--- a/Master/texmf-dist/tex/context/base/node-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/node-ini.mkiv
@@ -18,7 +18,10 @@
\newcount\filterstate \filterstate\plusone % hm, public
\registerctxluafile{node-ini}{1.001}
+\registerctxluafile{node-met}{1.001}
+\registerctxluafile{node-nut}{1.001}
\registerctxluafile{node-res}{1.001}
+\registerctxluafile{node-ppt}{1.001} % experimental
\registerctxluafile{node-dir}{1.001}
\registerctxluafile{node-aux}{1.001}
\registerctxluafile{node-tst}{1.001}
@@ -32,6 +35,8 @@
\registerctxluafile{node-acc}{1.001} % experimental
%registerctxluafile{node-prp}{1.001} % makes no sense (yet)
+\doiffileelse{node-ppt.lua}{\registerctxluafile{node-ppt}{1.001}}{}
+
\newcount\c_node_tracers_show_box % box number
\unexpanded\def\shownextnodes{\afterassignment\node_tracers_show_next\c_node_tracers_show_box}
diff --git a/Master/texmf-dist/tex/context/base/node-inj.lua b/Master/texmf-dist/tex/context/base/node-inj.lua
index 697370cfb25..b91646ffc1a 100644
--- a/Master/texmf-dist/tex/context/base/node-inj.lua
+++ b/Master/texmf-dist/tex/context/base/node-inj.lua
@@ -8,8 +8,9 @@ if not modules then modules = { } end modules ['node-inj'] = {
-- This is very experimental (this will change when we have luatex > .50 and
-- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
+-- test fonts. Some optimizations can go away when we have faster machines.
+
+-- todo: ignore kerns between disc and glyph
local next = next
local utfchar = utf.char
@@ -29,15 +30,33 @@ local injections = nodes.injections
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
-local nodepool = nodes.pool
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
local newkern = nodepool.kern
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
local a_kernpair = attributes.private('kernpair')
-local a_ligacomp = attributes.private('ligacomp')
+----- a_ligacomp = attributes.private('ligacomp')
local a_markbase = attributes.private('markbase')
local a_markmark = attributes.private('markmark')
local a_markdone = attributes.private('markdone')
@@ -69,8 +88,8 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
+ setattr(start,a_cursbase,bound)
+ setattr(nxt,a_curscurs,bound)
cursives[bound] = { rlmode, dx, dy, ws, wn }
return dx, dy, bound
end
@@ -79,14 +98,14 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
+ local bound = getattr(current,a_kernpair)
if bound then
local kb = kerns[bound]
-- inefficient but singles have less, but weird anyway, needs checking
kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
else
bound = #kerns + 1
- current[a_kernpair] = bound
+ setattr(current,a_kernpair,bound)
kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
end
return x, y, w, h, bound
@@ -98,7 +117,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
- current[a_kernpair] = bound
+ setattr(current,a_kernpair,bound)
kerns[bound] = { rlmode, dx }
return dx, bound
else
@@ -106,9 +125,9 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
end
end
-function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
+function injections.setmark(start,base,factor,rlmode,ba,ma) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ local bound = getattr(base,a_markbase)
local index = 1
if bound then
local mb = marks[bound]
@@ -116,19 +135,18 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanch
-- if not index then index = #mb + 1 end
index = #mb + 1
mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
return dx, dy, bound
else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
end
end
--- index = index or 1
index = index or 1
bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
+ setattr(base,a_markbase,bound)
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
marks[bound] = { [index] = { dx, dy, rlmode } }
return dx, dy, bound
end
@@ -140,15 +158,15 @@ end
local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if getsubtype(n) < 256 then
+ local kp = getattr(n,a_kernpair)
+ local mb = getattr(n,a_markbase)
+ local mm = getattr(n,a_markmark)
+ local md = getattr(n,a_markdone)
+ local cb = getattr(n,a_cursbase)
+ local cc = getattr(n,a_curscurs)
+ local char = getchar(n)
+ report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
if kp then
local k = kerns[kp]
if k[3] then
@@ -196,22 +214,24 @@ local function show_result(head)
local current = head
local skipping = false
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
skipping = false
elseif id == kern_code then
- report_injections("kern: %p",current.kern)
+ report_injections("kern: %p",getfield(current,"kern"))
skipping = false
elseif not skipping then
report_injections()
skipping = true
end
- current = current.next
+ current = getnext(current)
end
end
function injections.handler(head,where,keep)
+ head = tonut(head)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
if trace_injections then
@@ -222,17 +242,18 @@ function injections.handler(head,where,keep)
if has_kerns then -- move outside loop
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
+ if getsubtype(n) < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
end
if tm then
- mk[n] = tm[n.char]
+ mk[n] = tm[getchar(n)]
end
- local k = n[a_kernpair]
+ local k = getattr(n,a_kernpair)
if k then
local kk = kerns[k]
if kk then
@@ -252,15 +273,16 @@ function injections.handler(head,where,keep)
else
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
+ if getsubtype(n) < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
end
if tm then
- mk[n] = tm[n.char]
+ mk[n] = tm[getchar(n)]
end
end
end
@@ -270,7 +292,7 @@ function injections.handler(head,where,keep)
local cx = { }
if has_kerns and next(ky) then
for n, k in next, ky do
- n.yoffset = k
+ setfield(n,"yoffset",k)
end
end
-- todo: reuse t and use maxt
@@ -281,9 +303,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do -- valid == glyphs
local n = valid[i]
if not mk[n] then
- local n_cursbase = n[a_cursbase]
+ local n_cursbase = getattr(n,a_cursbase)
if p_cursbase then
- local n_curscurs = n[a_curscurs]
+ local n_curscurs = getattr(n,a_curscurs)
if p_cursbase == n_curscurs then
local c = cursives[n_curscurs]
if c then
@@ -308,20 +330,20 @@ function injections.handler(head,where,keep)
end
end
elseif maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset")
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ti.yoffset + ny
+ setfield(ti,"yoffset",getfield(ti,"yoffset") + ny)
end
maxt = 0
end
if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset")
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ny
+ setfield(ti,"yoffset",ny)
end
maxt = 0
end
@@ -329,11 +351,11 @@ function injections.handler(head,where,keep)
end
end
if maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset") -- hm, n unset ?
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ny
+ setfield(ti,"yoffset",ny)
end
maxt = 0
end
@@ -344,52 +366,83 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p = valid[i]
- local p_markbase = p[a_markbase]
+ local p_markbase = getattr(p,a_markbase)
if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,getnext(p)) do
+ local n_markmark = getattr(n,a_markmark)
if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
+ local index = getattr(n,a_markdone) or 1
local d = mrks[index]
if d then
local rlmode = d[3]
--
local k = wx[p]
+ local px = getfield(p,"xoffset")
+ local ox = 0
if k then
local x = k[2]
local w = k[4]
if w then
if rlmode and rlmode >= 0 then
-- kern(x) glyph(p) kern(w-x) mark(n)
- n.xoffset = p.xoffset - p.width + d[1] - (w-x)
+ ox = px - getfield(p,"width") + d[1] - (w-x)
+ -- report_injections("l2r case 1: %p",ox)
else
-- kern(w-x) glyph(p) kern(x) mark(n)
- n.xoffset = p.xoffset - d[1] - x
+ ox = px - d[1] - x
+ -- report_injections("r2l case 1: %p",ox)
end
else
if rlmode and rlmode >= 0 then
-- okay for husayni
- n.xoffset = p.xoffset - p.width + d[1]
+ ox = px - getfield(p,"width") + d[1]
+ -- report_injections("r2l case 2: %p",ox)
else
-- needs checking: is x ok here?
- n.xoffset = p.xoffset - d[1] - x
+ ox = px - d[1] - x
+ -- report_injections("r2l case 2: %p",ox)
end
end
else
+ -- if rlmode and rlmode >= 0 then
+ -- ox = px - getfield(p,"width") + d[1]
+ -- -- report_injections("l2r case 3: %p",ox)
+ -- else
+ -- ox = px - d[1]
+ -- -- report_injections("r2l case 3: %p",ox)
+ -- end
+ --
+ -- we need to deal with fonts that have marks with width
+ --
+ local wp = getfield(p,"width")
+ local wn = getfield(n,"width") -- in arial marks have widths
if rlmode and rlmode >= 0 then
- n.xoffset = p.xoffset - p.width + d[1]
+ ox = px - wp + d[1]
+ -- report_injections("l2r case 3: %p",ox)
else
- n.xoffset = p.xoffset - d[1]
+ ox = px - d[1]
+ -- report_injections("r2l case 3: %p",ox)
end
+ if wn ~= 0 then
+ -- bad: we should center
+ insert_node_before(head,n,newkern(-wn/2))
+ insert_node_after(head,n,newkern(-wn/2))
+ -- wx[n] = { 0, -wn/2, 0, -wn }
+ end
+ -- so far
end
- -- --
+ setfield(n,"xoffset",ox)
+ --
+ local py = getfield(p,"yoffset")
+ local oy = 0
if mk[p] then
- n.yoffset = p.yoffset + d[2]
+ oy = py + d[2]
else
- n.yoffset = n.yoffset + p.yoffset + d[2]
+ oy = getfield(n,"yoffset") + py + d[2]
end
+ setfield(n,"yoffset",oy)
--
if nofmarks == 1 then
break
@@ -397,6 +450,8 @@ function injections.handler(head,where,keep)
nofmarks = nofmarks - 1
end
end
+ elseif not n_markmark then
+ break -- HH: added 2013-09-12: no need to deal with non marks
else
-- KE: there can be sequences in ligatures
end
@@ -458,7 +513,7 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
- return head, true
+ return tonode(head), true
elseif not keep then
kerns, cursives, marks = { }, { }, { }
end
@@ -467,14 +522,14 @@ function injections.handler(head,where,keep)
trace(head)
end
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
+ if getsubtype(n) < 256 then
+ local k = getattr(n,a_kernpair)
if k then
local kk = kerns[k]
if kk then
local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
+ setfield(n,"yoffset",y) -- todo: h ?
end
if w then
-- copied from above
@@ -511,9 +566,9 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
- return head, true
+ return tonode(head), true
else
-- no tracing needed
end
- return head, false
+ return tonode(head), false
end
diff --git a/Master/texmf-dist/tex/context/base/node-ltp.lua b/Master/texmf-dist/tex/context/base/node-ltp.lua
new file mode 100644
index 00000000000..6ad5de140b9
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/node-ltp.lua
@@ -0,0 +1,3192 @@
+if not modules then modules = { } end modules ['node-par'] = {
+ version = 1.001,
+ comment = "companion to node-par.mkiv",
+ author = "Hans Hagen",
+ copyright = "ConTeXt Development Team",
+ license = "see context related readme files",
+ comment = "a translation of the built-in parbuilder, initial conversion by Taco Hoekwater",
+}
+
+-- todo: remove nest_stack from linebreak.w
+-- todo: use ex field as signal (index in ?)
+-- todo: attr driven unknown/on/off
+-- todo: permit global steps i.e. using an attribute that sets min/max/step and overloads the font parameters
+-- todo: split the three passes into three functions
+-- todo: simplify the direction stack, no copy needed
+-- todo: see if we can do without delta nodes (needs thinking)
+-- todo: add more mkiv like tracing
+-- todo: add a couple of plugin hooks
+-- todo: maybe split expansion code paths
+-- todo: fix line numbers (cur_list.pg_field needed)
+-- todo: check and improve protrusion
+-- todo: arabic etc (we could use pretty large scales there) .. marks and cursive
+
+--[[
+
+ This code is derived from traditional TeX and has bits of pdfTeX, Aleph (Omega), and of course LuaTeX. So,
+ the basic algorithm for sure is not our work. On the other hand, the directional model in LuaTeX is cleaned
+ up as is other code. And of course there are hooks for callbacks.
+
+ The first version of the code below was a conversion of the C code that in turn was a conversion from the
+ original Pascal code. Around September 2008 we experimented with, and discussed, possible approaches to improving
+ the typesetting of Arabic, and as our policy is that extensions happen in Lua, this meant that we needed a parbuilder
+ in Lua. Taco's first conversion still looked quite C-ish and in the process of cleaning up we uncovered some odd
+ bits and pieces in the original code as well. I did some first cleanup to get rid of C-artefacts, and Taco and I
+ spent the usual amount of Skyping to sort out problems. At that point we diverted to other LuaTeX issues.
+
+ A while later I picked up this thread and looked into better ways to deal with font expansion (aka hz). I got it
+ running using a simpler method. One reason why the built-in mechanism is slow is that there is a lot of redundancy
+ in the calculations. Expanded widths are recalculated each time, and because the hpack routine does it again that
+ gives some overhead. In the process extra fonts are created with different dimensions so that the backend can
+ deal with it. The alternative method doesn't create fonts but passes an expansion factor to the pdf generator.
+ The small patch needed for the backend code worked more or less okay but was never integrated into LuaTeX due to
+ lack of time.
+
+ This all happened in 2010 while listening to Peter Gabriel's "Scratch My Back" and Camel's "Rayaz", so it was a
+ rather relaxed job.
+
+ In 2012 I picked up this thread. Because both languages are similar but also quite different it took some time
+ to get compatible output. Because the C code uses macros, careful checking was needed. Of course Lua's table model
+ and local variables brought some work as well. And still the code looks a bit C-ish. We could not divert too much
+ from the original model simply because it's well documented but future versions (or variants) might as well look
+ different.
+
+ Eventually I'll split this code into passes so that we can better see what happens, but first we need to reach
+ a decent level of stability. The current expansion results are not the same as the built-in but that was never
+ the objective. It all has to do with slightly different calculations.
+
+ The original C-code related to protrusion and expansion is not that efficient as many (redundant) function
+ calls take place in the linebreaker and packer. As most work related to fonts is done in the backend, we
+ can simply stick to width calculations here. Also, it is no problem at all that we use floating point
+ calculations (as Lua has only floats). The final result will look ok as the hpack will nicely compensate
+ for rounding errors as it will normally distribute the content well enough. And let's admit: most texies
+ won't see it anyway. As long as we're cross platform compatible it's fine.
+
+ We use the table checked_expansion to keep track of font related parameters (per paragraph). The table is
+ also the signal that we have adjustments > 1. In retrospect one might wonder if adjusting kerns is such a
+ good idea because other spacing is also not treated. If we were to stick to the regular hpack routine
+ we would have to follow the same logic, but I decided to use a Lua hpacker so that constraint went away. And
+ anyway, instead of doing a lookup in the kern table (that we don't have in node mode) the set kern value
+ is used. Disabling kern scaling will become an option in Luatex some day. You can blame me for all errors
+ that crept in and I know that there are some.
+
+ To be honest, I slowly start to grasp the magic here as normally I start from scratch when implementing
+ something (as it's the only way I can understand things). This time I had a recently acquired stack of
+ Porcupine Tree disks to get me through, although I must admit that watching their dvd's is more fun
+ than coding.
+
+ Picking up this effort was inspired by discussions between Luigi Scarso and me about efficiency of Lua
+ code and we needed some stress tests to compare regular LuaTeX and LuajitTeX. One of the tests was
+ processing tufte.tex as that one has lots of hyphenations and is a tough one to get right.
+
+ tufte: boxed 1000 times, no flushing in backend:
+
+ \testfeatureonce{1000}{\setbox0\hbox{\tufte}}
+ \testfeatureonce{1000}{\setbox0\vbox{\tufte}}
+ \startparbuilder[basic]\testfeatureonce{1000}{\setbox0\vbox{\tufte}}\stopparbuilder
+
+               method               normal     hz       comment
+
+    luatex     tex hbox              9.64      9.64     baseline font feature processing, hyphenation etc: 9.74
+               tex vbox              9.84     10.16     0.20 linebreak / 0.52 with hz -> 0.32 hz overhead (150pct more)
+               lua vbox             17.28     18.43     7.64 linebreak / 8.79 with hz -> 1.33 hz overhead ( 20pct more)
+
+               new laptop | no nuts
+                                     3.42               baseline
+                                     3.63               0.21 linebreak
+                                     7.38               3.96 linebreak
+
+               new laptop | most nuts
+                                     2.45               baseline
+                                     2.53               0.08 linebreak
+                                     6.16               3.71 linebreak
+               ltp nuts              5.45               3.00 linebreak
+
+    luajittex  tex hbox              6.33      6.33     baseline font feature processing, hyphenation etc: 6.33
+               tex vbox              6.53      6.81     0.20 linebreak / 0.48 with hz -> 0.28 hz overhead (expected 0.32)
+               lua vbox             11.06     11.81     4.53 linebreak / 5.28 with hz -> 0.75 hz overhead
+
+               new laptop | no nuts
+                                     2.06               baseline
+                                     2.27               0.21 linebreak
+                                     3.95               1.89 linebreak
+
+               new laptop | most nuts
+                                     1.25               baseline
+                                     1.30               0.05 linebreak
+                                     3.03               1.78 linebreak
+               ltp nuts              2.47               1.22 linebreak
+
+ It is interesting that the runtime of the built-in parbuilder indeed increases considerably when expansion
+ is enabled, but in the Lua variant the extra overhead is far less significant. This means that when we
+ retrofit the same approach into the core, the overhead of expansion can be sort of nilled.
+
+ In 2013 the expansion factor method also became used at the TeX end, so I could complete the code here, and
+ indeed, expansion works quite well now (not compatible of course, because we use floats at the Lua end). The
+ Lua variant is still slower but quite ok, especially if we go nuts.
+
+ A next iteration will provide plug-ins and more control. I will also explore the possibility of avoiding the
+ redundant hpack calculations (easier now, although I've only done some quick and dirty experiments).
+
+]]--
+
+local utfchar = utf.char
+local write, write_nl = texio.write, texio.write_nl
+local sub, format = string.sub, string.format
+local round, floor = math.round, math.floor
+local insert, remove = table.insert, table.remove
+
+local fonts, nodes, node = fonts, nodes, node
+
+local trace_basic = false trackers.register("builders.paragraphs.basic", function(v) trace_basic = v end)
+local trace_lastlinefit = false trackers.register("builders.paragraphs.lastlinefit", function(v) trace_lastlinefit = v end)
+local trace_adjusting = false trackers.register("builders.paragraphs.adjusting", function(v) trace_adjusting = v end)
+local trace_protruding = false trackers.register("builders.paragraphs.protruding", function(v) trace_protruding = v end)
+local trace_expansion = false trackers.register("builders.paragraphs.expansion", function(v) trace_expansion = v end)
+local trace_quality = false trackers.register("builders.paragraphs.quality", function(v) trace_quality = v end)
+
+local report_parbuilders = logs.reporter("nodes","parbuilders")
+local report_hpackers = logs.reporter("nodes","hpackers")
+
+local calculate_badness = tex.badness
+local texnest = tex.nest
+local texlists = tex.lists
+
+-- (t == 0 and 0) or (s <= 0 and 10000) or calculate_badness(t,s)
+
+-- local function calculate_badness(t,s)
+-- if t == 0 then
+-- return 0
+-- elseif s <= 0 then
+-- return 10000 -- infinite_badness
+-- else
+-- local r
+-- if t <= 7230584 then
+-- r = (t * 297) / s
+-- elseif s >= 1663497 then
+-- r = t / (s / 297)
+-- else
+-- r = t
+-- end
+-- if r > 1290 then
+-- return 10000 -- infinite_badness
+-- else
+-- return (r * r * r + 0x20000) / 0x40000
+-- end
+-- end
+-- end
+
+local parbuilders = builders.paragraphs
+local constructors = parbuilders.constructors
+
+local setmetatableindex = table.setmetatableindex
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local chardata = fonthashes.characters
+local quaddata = fonthashes.quads
+local parameters = fonthashes.parameters
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+
+local slide_nodelist = nuts.slide -- get rid of this, probably ok > 78.2
+local find_tail = nuts.tail
+local new_node = nuts.new
+local copy_node = nuts.copy
+local copy_nodelist = nuts.copy_list
+local flush_node = nuts.free
+local flush_nodelist = nuts.flush_list
+local hpack_nodes = nuts.hpack
+local xpack_nodes = nuts.hpack
+local replace_node = nuts.replace
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local traverse_by_id = nuts.traverse_id
+
+local setnodecolor = nodes.tracers.colors.set
+
+local nodepool = nuts.pool
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local kerncodes = nodes.kerncodes
+local glyphcodes = nodes.glyphcodes
+local gluecodes = nodes.gluecodes
+local margincodes = nodes.margincodes
+local disccodes = nodes.disccodes
+local mathcodes = nodes.mathcodes
+local fillcodes = nodes.fillcodes
+
+local temp_code = nodecodes.temp
+local glyph_code = nodecodes.glyph
+local ins_code = nodecodes.ins
+local mark_code = nodecodes.mark
+local adjust_code = nodecodes.adjust
+local penalty_code = nodecodes.penalty
+local whatsit_code = nodecodes.whatsit
+local disc_code = nodecodes.disc
+local math_code = nodecodes.math
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local unset_code = nodecodes.unset
+local marginkern_code = nodecodes.marginkern
+local rule_code = nodecodes.rule -- referenced in add_to_width below but missing from the original set of locals
+
+local leaders_code = gluecodes.leaders
+
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
+local pdfrefximage_code = whatcodes.pdfrefximage
+local pdfrefxform_code = whatcodes.pdfrefxform
+
+local kerning_code = kerncodes.kerning -- font kern
+local userkern_code = kerncodes.userkern
+
+local ligature_code = glyphcodes.ligature
+
+local stretch_orders = nodes.fillcodes
+
+local leftmargin_code = margincodes.left
+local rightmargin_code = margincodes.right
+
+local automatic_disc_code = disccodes.automatic
+local regular_disc_code = disccodes.regular
+local first_disc_code = disccodes.first
+local second_disc_code = disccodes.second
+
+local endmath_code = mathcodes.endmath
+
+local nosubtype_code = 0
+
+local unhyphenated_code = nodecodes.unhyphenated or 1
+local hyphenated_code = nodecodes.hyphenated or 2
+local delta_code = nodecodes.delta or 3
+local passive_code = nodecodes.passive or 4
+
+local maxdimen = number.maxdimen
+
+local max_halfword = 0x7FFFFFFF
+local infinite_penalty = 10000
+local eject_penalty = -10000
+local infinite_badness = 10000
+local awful_badness = 0x3FFFFFFF
+
+local fit_very_loose_class = 0 -- fitness for lines stretching more than their stretchability
+local fit_loose_class = 1 -- fitness for lines stretching 0.5 to 1.0 of their stretchability
+local fit_decent_class = 2 -- fitness for all other lines
+local fit_tight_class = 3 -- fitness for lines shrinking 0.5 to 1.0 of their shrinkability
+
+local new_penalty = nodepool.penalty
+local new_dir = nodepool.textdir
+local new_leftmarginkern = nodepool.leftmarginkern
+local new_rightmarginkern = nodepool.rightmarginkern
+local new_leftskip = nodepool.leftskip
+local new_rightskip = nodepool.rightskip
+local new_lineskip = nodepool.lineskip
+local new_baselineskip = nodepool.baselineskip
+local new_temp = nodepool.temp
+local new_rule = nodepool.rule
+
+local is_rotated = nodes.is_rotated
+local is_parallel = nodes.textdir_is_parallel
+local is_opposite = nodes.textdir_is_opposite
+local textdir_is_equal = nodes.textdir_is_equal
+local pardir_is_equal = nodes.pardir_is_equal
+local glyphdir_is_equal = nodes.glyphdir_is_equal
+
+local dir_pops = nodes.dir_is_pop
+local dir_negations = nodes.dir_negation
+local is_skipable = nuts.protrusion_skippable
+
+local a_fontkern = attributes.private('fontkern')
+
+-- helpers --
+
+-- It makes more sense to move the somewhat messy dir state tracking
+-- out of the main functions. First we create a stack allocator.
+
+local function new_dir_stack(dir) -- also use elsewhere
+ return { n = 0, dir }
+end
+
+-- The next function checks a dir node and returns the new dir state. By
+-- using a static table we are quite efficient. This function is used
+-- in the parbuilder.
+
+local function checked_line_dir(stack,current)
+ if not dir_pops[current] then
+ local n = stack.n + 1
+ stack.n = n
+ stack[n] = current
+ return getfield(current,"dir")
+ elseif stack.n > 0 then
+ local n = stack.n
+ local dirnode = stack[n]
+ stack.n = n - 1
+ return getfield(dirnode,"dir")
+ else
+ report_parbuilders("warning: missing pop node (%a)",1) -- in line ...
+ end
+end
+
+-- The next function checks the dir nodes in a list and appends the negations
+-- that are currently needed (some day LuaTeX will be more tolerant). We use
+-- the negations for the next line.
+
+local function inject_dirs_at_end_of_line(stack,current,start,stop)
+ local e = start
+ local n = stack.n
+ local h = nil
+ while start and start ~= stop do
+ if getid(start) == whatsit_code and getsubtype(start) == dir_code then
+ if not dir_pops[getfield(start,"dir")] then -- weird, what is this #
+ n = n + 1
+ stack[n] = start
+ elseif n > 0 then
+ n = n - 1
+ else
+ report_parbuilders("warning: missing pop node (%a)",2) -- in line ...
+ end
+ end
+ start = getnext(start)
+ end
+ for i=n,1,-1 do
+ h, current = insert_node_after(current,current,new_dir(dir_negations[getfield(stack[i],"dir")]))
+ end
+ stack.n = n
+ return current
+end
+
+local function inject_dirs_at_begin_of_line(stack,current)
+ local h = nil
+ for i=stack.n,1,-1 do
+ h, current = insert_node_after(current,current,new_dir(stack[i]))
+ end
+ stack.n = 0
+ return current
+end
+
+-- diagnostics --
+
+local dummy = function() end
+
+local diagnostics = {
+ start = dummy,
+ stop = dummy,
+ current_pass = dummy,
+ break_node = dummy,
+ feasible_break = dummy,
+}
+
+-- statistics --
+
+local nofpars, noflines, nofprotrudedlines, nofadjustedlines = 0, 0, 0, 0
+
+local function register_statistics(par)
+ local statistics = par.statistics
+ nofpars = nofpars + 1
+ noflines = noflines + statistics.noflines
+ nofprotrudedlines = nofprotrudedlines + statistics.nofprotrudedlines
+ nofadjustedlines = nofadjustedlines + statistics.nofadjustedlines
+end
+
+-- resolvers --
+
+local whatsiters = {
+ get_width = { },
+ get_dimensions = { },
+}
+
+local get_whatsit_width = whatsiters.get_width
+local get_whatsit_dimensions = whatsiters.get_dimensions
+
+local function get_width (n,dir) return getfield(n,"width") end
+local function get_dimensions(n,dir) return getfield(n,"width"), getfield(n,"height"), getfield(n,"depth") end
+
+get_whatsit_width[pdfrefximage_code] = get_width
+get_whatsit_width[pdfrefxform_code ] = get_width
+
+get_whatsit_dimensions[pdfrefximage_code] = get_dimensions
+get_whatsit_dimensions[pdfrefxform_code ] = get_dimensions
+
+-- expansion etc --
+
+local function calculate_fraction(x,n,d,max_answer)
+ local the_answer = x * n/d + 1/2 -- round ?
+ if the_answer > max_answer then
+ return max_answer
+ elseif the_answer < -max_answer then
+ return -max_answer
+ else
+ return the_answer
+ end
+end
+
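As an aside, the clamping in calculate_fraction above is easy to check in isolation. The following standalone sketch restates the helper under a hypothetical name, with made-up numbers, purely to illustrate the rounding and the clamping against max_answer:

-- hypothetical, standalone restatement; not part of the patch
local function clamped_fraction(x,n,d,max_answer) -- same arithmetic as calculate_fraction above
    local the_answer = x * n/d + 1/2
    if the_answer > max_answer then
        return max_answer
    elseif the_answer < -max_answer then
        return -max_answer
    else
        return the_answer
    end
end

print(clamped_fraction( 100, 3, 4, 50)) -- 75.5 exceeds the maximum, so 50 is returned
print(clamped_fraction(-100, 3, 4, 50)) -- -74.5 lies below -50, so -50 is returned
print(clamped_fraction(  10, 3, 4, 50)) -- 8, i.e. 10 * 3/4 rounded up by the added 1/2
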
+local function check_shrinkage(par,n)
+ -- called often, so maybe move inline -- use NORMAL
+ if getfield(n,"shrink_order") ~= 0 and getfield(n,"shrink") ~= 0 then
+ if par.no_shrink_error_yet then
+ par.no_shrink_error_yet = false
+ report_parbuilders("infinite glue shrinkage found in a paragraph and removed")
+ end
+ n = copy_node(n)
+ setfield(n,"shrink_order",0)
+ end
+ return n
+end
+
+-- It doesn't really speed up much but the additional memory usage is
+-- rather small so it doesn't hurt too much.
+
+local expansions = { }
+local nothing = { stretch = 0, shrink = 0 }
+
+setmetatableindex(expansions,function(t,font) -- we can store this in tfmdata if needed
+ local expansion = parameters[font].expansion -- can be an extra hash
+ if expansion and expansion.auto then
+ local factors = { }
+ local c = chardata[font]
+ setmetatableindex(factors,function(t,char)
+ local fc = c[char]
+ local ef = fc.expansion_factor
+ if ef and ef > 0 then
+ local stretch = expansion.stretch
+ local shrink = expansion.shrink
+ if stretch ~= 0 or shrink ~= 0 then
+ local factor = ef / 1000
+ local ef_quad = factor * quaddata[font] / 1000
+ local v = {
+ glyphstretch = stretch * ef_quad,
+ glyphshrink = shrink * ef_quad,
+ factor = factor,
+ stretch = stretch,
+ shrink = shrink,
+ }
+ t[char] = v
+ return v
+ end
+ end
+ t[char] = nothing
+ return nothing
+ end)
+ t[font] = factors
+ return factors
+ else
+ t[font] = false
+ return false
+ end
+end)
+
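The two setmetatableindex calls above effectively build a lazy two-level cache: the first lookup of a font, and then of a character, computes and stores the expansion factors; later lookups are plain table accesses (or false for fonts without automatic expansion). A rough usage sketch, with a hypothetical font id and character code, might look as follows (this is illustration only, not code from the patch):

-- hypothetical font id and character code, only to show how the cache is consulted
local fontid, char = 25, 0x65
local perfont = expansions[fontid]     -- false when the font has no automatic expansion
if perfont then
    local data = perfont[char]         -- computed on first access, cached afterwards
    if data.stretch ~= 0 or data.shrink ~= 0 then
        -- glyphstretch and glyphshrink are already scaled by the em width of the font
        report_parbuilders("char %a stretches %p and shrinks %p",char,data.glyphstretch,data.glyphshrink)
    end
end
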
+local function kern_stretch_shrink(p,d)
+ local left = getprev(p)
+ if left and getid(left) == glyph_code then -- how about disc nodes?
+ local data = expansions[getfont(left)][getchar(left)]
+ if data then
+ local stretch = data.stretch
+ local shrink = data.shrink
+ if stretch ~= 0 then
+ -- stretch = data.factor * (d * stretch - d)
+ stretch = data.factor * d * (stretch - 1)
+ end
+ if shrink ~= 0 then
+ -- shrink = data.factor * (d * shrink - d)
+ shrink = data.factor * d * (shrink - 1)
+ end
+ return stretch, shrink
+ end
+ end
+ return 0, 0
+end
+
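The commented and the active formulas in kern_stretch_shrink are algebraically the same: factor * (d * stretch - d) equals factor * d * (stretch - 1). A tiny standalone check with made-up numbers:

-- made-up factor, kern amount and stretch ratio, only to confirm the rewritten formula
local factor, d, stretch = 0.05, 65536, 1.2
local a = factor * (d * stretch - d)
local b = factor * d * (stretch - 1)
assert(math.abs(a - b) < 1e-6) -- both come out as 655.36 (scaled points)
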
+local expand_kerns = false
+----- expand_kerns = "both"
+
+directives.register("builders.paragraphs.adjusting.kerns",function(v)
+ if not v then
+ expand_kerns = false
+ elseif v == "stretch" or v == "shrink" then
+ expand_kerns = v
+ elseif v == "both" then
+ expand_kerns = true
+ else
+ expand_kerns = toboolean(v,true) or false
+ end
+end)
+
+-- state:
+
+local function check_expand_pars(checked_expansion,f)
+ local expansion = parameters[f].expansion
+ if not expansion then
+ checked_expansion[f] = false
+ return false
+ end
+-- expansion.step = 1
+ local step = expansion.step or 0
+ local stretch = expansion.stretch or 0
+ local shrink = expansion.shrink or 0
+ if step == 0 or (stretch == 0 and shrink == 0) then
+ checked_expansion[f] = false
+ return false
+ end
+ local par = checked_expansion.par
+ if par.cur_font_step < 0 then
+ par.cur_font_step = step
+ elseif par.cur_font_step ~= step then
+ report_parbuilders("using fonts with different step of expansion in one paragraph is not allowed")
+ checked_expansion[f] = false
+ return false
+ end
+ if stretch == 0 then
+ -- okay
+ elseif par.max_stretch_ratio < 0 then
+ par.max_stretch_ratio = stretch -- expansion_factor
+ elseif par.max_stretch_ratio ~= stretch then
+ report_parbuilders("using fonts with different stretch limit of expansion in one paragraph is not allowed")
+ checked_expansion[f] = false
+ return false
+ end
+ if shrink == 0 then
+ -- okay
+ elseif par.max_shrink_ratio < 0 then
+ par.max_shrink_ratio = shrink -- - expansion_factor
+ elseif par.max_shrink_ratio ~= shrink then
+ report_parbuilders("using fonts with different shrink limit of expansion in one paragraph is not allowed")
+ checked_expansion[f] = false
+ return false
+ end
+ if trace_adjusting then
+ report_parbuilders("expanding font %a using step %a, shrink %a and stretch %a",f,step,stretch,shrink)
+ end
+ local e = expansions[f]
+ checked_expansion[f] = e
+ return e
+end
+
+local function check_expand_lines(checked_expansion,f)
+ local expansion = parameters[f].expansion
+ if not expansion then
+ checked_expansion[f] = false
+ return false
+ end
+-- expansion.step = 1
+ local step = expansion.step or 0
+ local stretch = expansion.stretch or 0
+ local shrink = expansion.shrink or 0
+ if step == 0 or (stretch == 0 and shrink == 0) then
+ checked_expansion[f] = false
+ return false
+ end
+ if trace_adjusting then
+ report_parbuilders("expanding font %a using step %a, shrink %a and stretch %a",f,step,stretch,shrink)
+ end
+ local e = expansions[f]
+ checked_expansion[f] = e
+ return e
+end
+
+-- protrusion
+
+local function find(head) -- do we really want to recurse into an hlist?
+ while head do
+ local id = getid(head)
+ if id == glyph_code then
+ return head
+ elseif id == hlist_code then
+ local found = find(getlist(head))
+ if found then
+ return found
+ else
+ head = getnext(head)
+ end
+ elseif is_skipable(head) then
+ head = getnext(head)
+ else
+ return head
+ end
+ end
+ return nil
+end
+
+local function find_protchar_left(l) -- weird function
+ local ln = getnext(l)
+ if ln and getid(ln) == hlist_code and not getlist(ln) and getfield(ln,"width") == 0 and getfield(ln,"height") == 0 and getfield(ln,"depth") == 0 then
+ l = getnext(l)
+ else -- if d then -- was always true
+ local id = getid(l)
+ while ln and not (id == glyph_code or id < math_code) do -- is there always a glyph?
+ l = ln
+ ln = getnext(l)
+ id = getid(ln)
+ end
+ end
+ -- if getid(l) == glyph_code then
+ -- return l
+ -- end
+ return find(l) or l
+end
+
+local function find(head,tail)
+ local tail = tail or find_tail(head)
+ while tail do
+ local id = getid(tail)
+ if id == glyph_code then
+ return tail
+ elseif id == hlist_code then
+ local found = find(getlist(tail))
+ if found then
+ return found
+ else
+ tail = getprev(tail)
+ end
+ elseif is_skipable(tail) then
+ tail = getprev(tail)
+ else
+ return tail
+ end
+ end
+ return nil
+end
+
+local function find_protchar_right(l,r)
+ return r and find(l,r) or r
+end
+
+local function left_pw(p)
+ local font = getfont(p)
+ local prot = chardata[font][getchar(p)].left_protruding
+ if not prot or prot == 0 then
+ return 0
+ end
+ return prot * quaddata[font] / 1000, p
+end
+
+local function right_pw(p)
+ local font = getfont(p)
+ local prot = chardata[font][getchar(p)].right_protruding
+ if not prot or prot == 0 then
+ return 0
+ end
+ return prot * quaddata[font] / 1000, p
+end
+
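Both helpers scale a protrusion value, expressed in thousandths of an em, by the quad (em width) of the font. A worked example with made-up numbers: assuming a quad of 10pt and a right_protruding value of 100, the resulting margin kern is one tenth of an em:

-- made-up numbers: a 10pt quad (655360 scaled points) and a protrusion of 100/1000 em
local quad = 655360
local right_protruding = 100
print(right_protruding * quad / 1000) -- 65536, i.e. 1pt protruding into the right margin
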
+-- par parameters
+
+local function reset_meta(par)
+ local active = {
+ id = hyphenated_code,
+ line_number = max_halfword,
+ }
+ active.next = par.active -- head of metalist
+ par.active = active
+ par.passive = nil
+end
+
+local function add_to_width(line_break_dir,checked_expansion,s) -- split into two loops (normal and expansion)
+ local size = 0
+ local adjust_stretch = 0
+ local adjust_shrink = 0
+ while s do
+ local id = getid(s)
+ if id == glyph_code then
+ if is_rotated[line_break_dir] then -- can be shared
+ size = size + getfield(s,"height") + getfield(s,"depth")
+ else
+ size = size + getfield(s,"width")
+ end
+ if checked_expansion then
+ local data = checked_expansion[getfont(s)]
+ if data then
+ data = data[getchar(s)]
+ if data then
+ adjust_stretch = adjust_stretch + data.glyphstretch
+ adjust_shrink = adjust_shrink + data.glyphshrink
+ end
+ end
+ end
+ elseif id == hlist_code or id == vlist_code then
+ if is_parallel[getfield(s,"dir")][line_break_dir] then
+ size = size + getfield(s,"width")
+ else
+ size = size + getfield(s,"height") + getfield(s,"depth")
+ end
+ elseif id == kern_code then
+ local kern = getfield(s,"kern")
+ if kern ~= 0 then
+ if checked_expansion and expand_kerns and (getsubtype(s) == kerning_code or getattr(s,a_fontkern)) then
+ local stretch, shrink = kern_stretch_shrink(s,kern)
+ if expand_kerns == "stretch" then
+ adjust_stretch = adjust_stretch + stretch
+ elseif expand_kerns == "shrink" then
+ adjust_shrink = adjust_shrink + shrink
+ else
+ adjust_stretch = adjust_stretch + stretch
+ adjust_shrink = adjust_shrink + shrink
+ end
+ end
+ size = size + kern
+ end
+ elseif id == rule_code then
+ size = size + getfield(s,"width")
+ elseif trace_unsupported then
+ report_parbuilders("unsupported node at location %a",6)
+ end
+ s = getnext(s)
+ end
+ return size, adjust_stretch, adjust_shrink
+end
+
+local function compute_break_width(par,break_type,p) -- split in two
+ local break_width = par.break_width
+ if break_type > unhyphenated_code then
+ local disc_width = par.disc_width
+ local checked_expansion = par.checked_expansion
+ local line_break_dir = par.line_break_dir
+ local break_size = break_width.size + disc_width.size
+ local break_adjust_stretch = break_width.adjust_stretch + disc_width.adjust_stretch
+ local break_adjust_shrink = break_width.adjust_shrink + disc_width.adjust_shrink
+ local replace = getfield(p,"replace")
+ if replace then
+ local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
+ break_size = break_size - size
+ break_adjust_stretch = break_adjust_stretch - adjust_stretch
+ break_adjust_shrink = break_adjust_shrink - adjust_shrink
+ end
+ local post = getfield(p,"post")
+ if post then
+ local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,post)
+ break_size = break_size + size
+ break_adjust_stretch = break_adjust_stretch + adjust_stretch
+ break_adjust_shrink = break_adjust_shrink + adjust_shrink
+ end
+ break_width.size = break_size
+ break_width.adjust_stretch = break_adjust_stretch
+ break_width.adjust_shrink = break_adjust_shrink
+ if not post then
+ p = getnext(p)
+ else
+ return
+ end
+ end
+ while p do -- skip spacing etc
+ local id = getid(p)
+ if id == glyph_code then
+ return -- happens often
+ elseif id == glue_code then
+ local spec = getfield(p,"spec")
+ local order = stretch_orders[getfield(spec,"stretch_order")]
+ break_width.size = break_width.size - getfield(spec,"width")
+ break_width[order] = break_width[order] - getfield(spec,"stretch")
+ break_width.shrink = break_width.shrink - getfield(spec,"shrink")
+ elseif id == penalty_code then
+ -- do nothing
+ elseif id == kern_code then
+ if getsubtype(p) == userkern_code then
+ break_width.size = break_width.size - getfield(p,"kern")
+ else
+ return
+ end
+ elseif id == math_code then
+ break_width.size = break_width.size - getfield(p,"surround")
+ else
+ return
+ end
+ p = getnext(p)
+ end
+end
+
+local function append_to_vlist(par, b)
+ local prev_depth = par.prev_depth
+ if prev_depth > par.ignored_dimen then
+ if getid(b) == hlist_code then
+ local d = getfield(par.baseline_skip,"width") - prev_depth - getfield(b,"height") -- deficiency of space between baselines
+ local s = d < par.line_skip_limit and new_lineskip(par.lineskip) or new_baselineskip(d)
+ -- local s = d < par.line_skip_limit
+ -- if s then
+ -- s = new_lineskip()
+ -- setfield(s,"spec",tex.lineskip)
+ -- else
+ -- s = new_baselineskip(d)
+ -- end
+ local head_field = par.head_field
+ if head_field then
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",s)
+ setfield(s,"prev",n)
+ else
+ par.head_field = s
+ end
+ end
+ end
+ local head_field = par.head_field
+ if head_field then
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",b)
+ setfield(b,"prev",n)
+ else
+ par.head_field = b
+ end
+ if getid(b) == hlist_code then
+ local pd = getfield(b,"depth")
+ par.prev_depth = pd
+ texnest[texnest.ptr].prevdepth = pd
+ end
+end
+
+local function append_list(par, b)
+ local head_field = par.head_field
+ if head_field then
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",b)
+ setfield(b,"prev",n)
+ else
+ par.head_field = b
+ end
+end
+
+-- We can actually make par local to this module as we never break inside a break call and that way the
+-- array is reused. At some point the information will be part of the paragraph spec as passed.
+
+local hztolerance = 2500
+local hzwarned = false
+
+local function used_skip(s)
+ return s and (getfield(s,"width") ~= 0 or getfield(s,"stretch") ~= 0 or getfield(s,"shrink") ~= 0) and s or nil
+end
+
+local function initialize_line_break(head,display)
+
+ local hang_indent = tex.hangindent or 0
+ local hsize = tex.hsize or 0
+ local hang_after = tex.hangafter or 0
+ local par_shape_ptr = tex.parshape
+ local left_skip = tonut(tex.leftskip) -- nodes
+ local right_skip = tonut(tex.rightskip) -- nodes
+ local pretolerance = tex.pretolerance
+ local tolerance = tex.tolerance
+ local adjust_spacing = tex.pdfadjustspacing
+ local protrude_chars = tex.pdfprotrudechars
+ local last_line_fit = tex.lastlinefit
+
+ local newhead = new_temp()
+ setfield(newhead,"next",head)
+
+ local adjust_spacing_status = adjust_spacing > 1 and -1 or 0
+
+ -- metatables
+
+ local par = {
+ head = newhead,
+ head_field = nil,
+ display = display,
+ font_in_short_display = 0,
+ no_shrink_error_yet = true, -- have we complained about infinite shrinkage?
+ second_pass = false, -- is this our second attempt to break this paragraph?
+ final_pass = false, -- is this our final attempt to break this paragraph?
+ threshold = 0, -- maximum badness on feasible lines
+
+ passive = nil, -- most recent node on passive list
+ printed_node = head, -- most recent node that has been printed
+ pass_number = 0, -- the number of passive nodes allocated on this pass
+ auto_breaking = 0, -- make auto_breaking accessible out of line_break
+
+ active_width = { size = 0, stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0, adjust_stretch = 0, adjust_shrink = 0 },
+ break_width = { size = 0, stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0, adjust_stretch = 0, adjust_shrink = 0 },
+ disc_width = { size = 0, adjust_stretch = 0, adjust_shrink = 0 },
+ fill_width = { stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0 },
+ background = { size = 0, stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0 },
+
+ hang_indent = hang_indent,
+ hsize = hsize,
+ hang_after = hang_after,
+ par_shape_ptr = par_shape_ptr,
+ left_skip = left_skip,
+ right_skip = right_skip,
+ pretolerance = pretolerance,
+ tolerance = tolerance,
+
+ protrude_chars = protrude_chars,
+ adjust_spacing = adjust_spacing,
+ max_stretch_ratio = adjust_spacing_status,
+ max_shrink_ratio = adjust_spacing_status,
+ cur_font_step = adjust_spacing_status,
+ checked_expansion = false,
+ tracing_paragraphs = tex.tracingparagraphs > 0,
+
+ emergency_stretch = tex.emergencystretch or 0,
+ looseness = tex.looseness or 0,
+ line_penalty = tex.linepenalty or 0,
+ hyphen_penalty = tex.hyphenpenalty or 0,
+ broken_penalty = tex.brokenpenalty or 0,
+ inter_line_penalty = tex.interlinepenalty or 0,
+ club_penalty = tex.clubpenalty or 0,
+ widow_penalty = tex.widowpenalty or 0,
+ display_widow_penalty = tex.displaywidowpenalty or 0,
+ ex_hyphen_penalty = tex.exhyphenpenalty or 0,
+
+ adj_demerits = tex.adjdemerits or 0,
+ double_hyphen_demerits = tex.doublehyphendemerits or 0,
+ final_hyphen_demerits = tex.finalhyphendemerits or 0,
+
+ first_line = 0, -- tex.nest[tex.nest.ptr].modeline, -- 0, -- cur_list.pg_field
+
+ each_line_height = tex.pdfeachlineheight or 0, -- this will go away
+ each_line_depth = tex.pdfeachlinedepth or 0, -- this will go away
+ first_line_height = tex.pdffirstlineheight or 0, -- this will go away
+ last_line_depth = tex.pdflastlinedepth or 0, -- this will go away
+ ignored_dimen = tex.pdfignoreddimen or 0, -- this will go away
+
+ baseline_skip = tonut(tex.baselineskip),
+ lineskip = tonut(tex.lineskip),
+ line_skip_limit = tex.lineskiplimit,
+
+ prev_depth = texnest[texnest.ptr].prevdepth,
+
+ final_par_glue = slide_nodelist(head), -- todo: we know tail already, slow
+
+ par_break_dir = tex.pardir,
+ line_break_dir = tex.pardir,
+
+ internal_pen_inter = 0, -- running localinterlinepenalty
+ internal_pen_broken = 0, -- running localbrokenpenalty
+ internal_left_box = nil, -- running localleftbox
+ internal_left_box_width = 0, -- running localleftbox width
+ init_internal_left_box = nil, -- running localleftbox
+ init_internal_left_box_width = 0, -- running localleftbox width
+ internal_right_box = nil, -- running localrightbox
+ internal_right_box_width = 0, -- running localrightbox width
+
+ best_place = { }, -- how to achieve minimal_demerits
+ best_pl_line = { }, -- corresponding line number
+ easy_line = 0, -- line numbers easy_line are equivalent in break nodes
+ last_special_line = 0, -- line numbers last_special_line all have the same width
+ first_width = 0, -- the width of all lines last_special_line, if no parshape has been specified
+ second_width = 0, -- the width of all lines last_special_line
+ first_indent = 0, -- left margin to go with first_width
+ second_indent = 0, -- left margin to go with second_width
+
+ best_bet = nil, -- use this passive node and its predecessors
+ fewest_demerits = 0, -- the demerits associated with best_bet
+ best_line = 0, -- line number following the last line of the new paragraph
+ line_diff = 0, -- the difference between the current line number and the optimum best_line
+
+ -- not yet used
+
+ best_pl_short = { }, -- shortfall corresponding to minimal_demerits
+ best_pl_glue = { }, -- corresponding glue stretch or shrink
+ do_last_line_fit = false,
+ last_line_fit = last_line_fit,
+
+ minimum_demerits = awful_badness,
+
+ minimal_demerits = {
+
+ [fit_very_loose_class] = awful_badness,
+ [fit_loose_class] = awful_badness,
+ [fit_decent_class] = awful_badness,
+ [fit_tight_class] = awful_badness,
+
+ },
+
+ prev_char_p = nil,
+
+ statistics = {
+
+ noflines = 0,
+ nofprotrudedlines = 0,
+ nofadjustedlines = 0,
+
+ },
+
+ -- -- just a thought ... parshape functions ... it would be nice to
+ -- -- also store the height so far (probably not too hard) although
+ -- -- in most cases we work on grids in such cases
+ --
+ -- adapt_width = function(par,line)
+ -- -- carry attribute, so that we can accumulate
+ -- local left = 655360 * (line - 1)
+ -- local right = 655360 * (line - 1)
+ -- return left, right
+ -- end
+
+ }
+
+ -- optimizers
+
+ par.used_left_skip = used_skip(par.left_skip)
+ par.used_right_skip = used_skip(par.right_skip)
+
+ -- so far
+
+ if adjust_spacing > 1 then
+ local checked_expansion = { par = par }
+ setmetatableindex(checked_expansion,check_expand_pars)
+ par.checked_expansion = checked_expansion
+
+ if par.tolerance < hztolerance then
+ if not hzwarned then
+ report_parbuilders("setting tolerance to %a for hz",hztolerance)
+ hzwarned = true
+ end
+ par.tolerance = hztolerance
+ end
+
+ end
+
+ -- we need par for the error message
+
+ local background = par.background
+
+ local l = check_shrinkage(par,left_skip)
+ local r = check_shrinkage(par,right_skip)
+ local l_order = stretch_orders[getfield(l,"stretch_order")]
+ local r_order = stretch_orders[getfield(r,"stretch_order")]
+
+ background.size = getfield(l,"width") + getfield(r,"width")
+ background.shrink = getfield(l,"shrink") + getfield(r,"shrink")
+ background[l_order] = getfield(l,"stretch")
+ background[r_order] = getfield(r,"stretch") + background[r_order]
+
+ -- this will move up so that we can assign the whole par table
+
+ if not par_shape_ptr then
+ if hang_indent == 0 then
+ par.second_width = hsize
+ par.second_indent = 0
+ else
+ local abs_hang_after = hang_after >0 and hang_after or -hang_after
+ local abs_hang_indent = hang_indent>0 and hang_indent or -hang_indent
+ par.last_special_line = abs_hang_after
+ if hang_after < 0 then
+ par.first_width = hsize - abs_hang_indent
+ if hang_indent >= 0 then
+ par.first_indent = hang_indent
+ else
+ par.first_indent = 0
+ end
+ par.second_width = hsize
+ par.second_indent = 0
+ else
+ par.first_width = hsize
+ par.first_indent = 0
+ par.second_width = hsize - abs_hang_indent
+ if hang_indent >= 0 then
+ par.second_indent = hang_indent
+ else
+ par.second_indent = 0
+ end
+ end
+ end
+ else
+ local last_special_line = #par_shape_ptr
+ par.last_special_line = last_special_line
+ local parshape = par_shape_ptr[last_special_line]
+ par.second_width = parshape[2]
+ par.second_indent = parshape[1]
+ end
+
+ if par.looseness == 0 then
+ par.easy_line = par.last_special_line
+ else
+ par.easy_line = max_halfword
+ end
+
+ if pretolerance >= 0 then
+ par.threshold = pretolerance
+ par.second_pass = false
+ par.final_pass = false
+ else
+ par.threshold = tolerance
+ par.second_pass = true
+ par.final_pass = par.emergency_stretch <= 0
+ if trace_basic then
+ if par.final_pass then
+ report_parbuilders("enabling second and final pass")
+ else
+ report_parbuilders("enabling second pass")
+ end
+ end
+ end
+
+ if last_line_fit > 0 then
+ local spec = getfield(par.final_par_glue,"spec")
+ local stretch = getfield(spec,"stretch")
+ local stretch_order = getfield(spec,"stretch_order")
+ if stretch > 0 and stretch_order > 0 and background.fi == 0 and background.fil == 0 and background.fill == 0 and background.filll == 0 then
+ par.do_last_line_fit = true
+ local si = stretch_orders[stretch_order]
+ if trace_lastlinefit or trace_basic then
+ report_parbuilders("enabling last line fit, stretch order %a set to %a, linefit is %a",si,stretch,last_line_fit)
+ end
+ par.fill_width[si] = stretch
+ end
+ end
+
+ return par
+end
+
+-- there are still all kinds of artefacts in here (a side effect I guess of pdftex,
+-- etex, omega and other extensions that got obscured by patching)
+
+local function post_line_break(par)
+
+ local prevgraf = texnest[texnest.ptr].prevgraf
+ local current_line = prevgraf + 1 -- the current line number being justified
+
+ local adjust_spacing = par.adjust_spacing
+ local protrude_chars = par.protrude_chars
+ local statistics = par.statistics
+
+ local stack = new_dir_stack()
+
+ local leftskip = par.used_left_skip -- used or normal ?
+ local rightskip = par.right_skip
+ local parshape = par.par_shape_ptr
+ local ignored_dimen = par.ignored_dimen
+
+ local adapt_width = par.adapt_width
+
+ -- reverse the links of the relevant passive nodes, goto first breakpoint
+
+ local current_break = nil
+
+ local break_node = par.best_bet.break_node
+ repeat
+ local first_break = break_node
+ break_node = break_node.prev_break
+ first_break.prev_break = current_break
+ current_break = first_break
+ until not break_node
+
+ local head = par.head
+
+ -- maybe : each_...
+
+ while current_break do
+
+ inject_dirs_at_begin_of_line(stack,head)
+
+ local disc_break = false
+ local post_disc_break = false
+ local glue_break = false
+
+ local lineend = nil -- q lineend refers to the last node of the line (and paragraph)
+ local lastnode = current_break.cur_break -- r lastnode refers to the node after which the dir nodes should be closed
+
+ if not lastnode then
+ -- only at the end
+ lastnode = slide_nodelist(head) -- todo: find_tail
+ if lastnode == par.final_par_glue then
+ lineend = lastnode
+ lastnode = getprev(lastnode)
+ end
+ else -- todo: use insert_list_after
+ local id = getid(lastnode)
+ if id == glue_code then
+ -- lastnode is normal skip
+ lastnode = replace_node(lastnode,new_rightskip(rightskip))
+ glue_break = true
+ lineend = lastnode
+ lastnode = getprev(lastnode)
+ elseif id == disc_code then
+ local prevlast = getprev(lastnode)
+ local nextlast = getnext(lastnode)
+ local subtype = getsubtype(lastnode)
+ local pre = getfield(lastnode,"pre")
+ local post = getfield(lastnode,"post")
+ local replace = getfield(lastnode,"replace")
+ if subtype == second_disc_code then
+ if not (getid(prevlast) == disc_code and getsubtype(prevlast) == first_disc_code) then
+ report_parbuilders('unsupported disc at location %a',3)
+ end
+ if pre then
+ flush_nodelist(pre)
+ setfield(lastnode,"pre",nil)
+ pre = nil -- signal
+ end
+ if replace then
+ local n = find_tail(replace)
+ setfield(prevlast,"next",replace)
+ setfield(replace,"prev",prevlast)
+ setfield(n,"next",lastnode)
+ setfield(lastnode,"prev",n)
+ setfield(lastnode,"replace",nil)
+ replace = nil -- signal
+ end
+ local pre = getfield(prevlast,"pre")
+ local post = getfield(prevlast,"post")
+ local replace = getfield(prevlast,"replace")
+ if pre then
+ flush_nodelist(pre)
+ setfield(prevlast,"pre",nil)
+ end
+ if replace then
+ flush_nodelist(replace)
+ setfield(prevlast,"replace",nil)
+ end
+ if post then
+ flush_nodelist(post)
+ setfield(prevlast,"post",nil)
+ end
+ elseif subtype == first_disc_code then
+ if not (getid(nextlast) == disc_code and getsubtype(nextlast) == second_disc_code) then
+ report_parbuilders('unsupported disc at location %a',4)
+ end
+ setfield(nextlast,"subtype",regular_disc_code)
+ setfield(nextlast,"replace",post)
+ setfield(lastnode,"post",nil)
+ end
+ if replace then
+ setfield(lastnode,"replace",nil) -- free
+ flush_nodelist(replace)
+ end
+ if pre then
+ local n = find_tail(pre)
+ setfield(prevlast,"next",pre)
+ setfield(pre,"prev",prevlast)
+ setfield(n,"next",lastnode)
+ setfield(lastnode,"prev",n)
+ setfield(lastnode,"pre",nil)
+ end
+ if post then
+ local n = find_tail(post)
+ setfield(lastnode,"next",post)
+ setfield(post,"prev",lastnode)
+ setfield(n,"next",nextlast)
+ setfield(nextlast,"prev",n)
+ setfield(lastnode,"post",nil)
+ post_disc_break = true
+ end
+ disc_break = true
+ elseif id == kern_code then
+ setfield(lastnode,"kern",0)
+ elseif getid(lastnode) == math_code then
+ setfield(lastnode,"surround",0)
+ end
+ end
+ lastnode = inject_dirs_at_end_of_line(stack,lastnode,getnext(head),current_break.cur_break)
+ local rightbox = current_break.passive_right_box
+ if rightbox then
+ lastnode = insert_node_after(lastnode,lastnode,copy_node(rightbox))
+ end
+ if not lineend then
+ lineend = lastnode
+ end
+ if lineend and lineend ~= head and protrude_chars > 0 then
+ local id = getid(lineend)
+ local c = (disc_break and (id == glyph_code or id ~= disc_code) and lineend) or getprev(lineend)
+ local p = find_protchar_right(getnext(head),c)
+ if p and getid(p) == glyph_code then
+ local w, last_rightmost_char = right_pw(p)
+ if last_rightmost_char and w ~= 0 then
+ -- so we inherit attributes, lineend is new pseudo head
+ lineend, c = insert_node_after(lineend,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
+ end
+ end
+ end
+ -- we finish the line
+ local r = getnext(lineend)
+ setfield(lineend,"next",nil)
+ if not glue_break then
+ if rightskip then
+ insert_node_after(lineend,lineend,new_rightskip(rightskip)) -- lineend moves on as pseudo head
+ end
+ end
+ -- each time ?
+ local q = getnext(head)
+ setfield(head,"next",r)
+ if r then
+ setfield(r,"prev",head)
+ end
+ -- insert leftbox (if needed after parindent)
+ local leftbox = current_break.passive_left_box
+ if leftbox then
+ local first = getnext(q)
+ if first and current_line == (par.first_line + 1) and getid(first) == hlist_code and not getlist(first) then
+ insert_node_after(q,q,copy_node(leftbox))
+ else
+ q = insert_node_before(q,q,copy_node(leftbox))
+ end
+ end
+ if protrude_chars > 0 then
+ local p = find_protchar_left(q)
+ if p and getid(p) == glyph_code then
+ local w, last_leftmost_char = left_pw(p)
+ if last_leftmost_char and w ~= 0 then
+ -- so we inherit attributes, q is pseudo head and moves back
+ q = insert_node_before(q,q,new_leftmarginkern(copy_node(last_leftmost_char),-w))
+ end
+ end
+ end
+ if leftskip then
+ q = insert_node_before(q,q,new_leftskip(leftskip))
+ end
+ local cur_width, cur_indent
+ if current_line > par.last_special_line then
+ cur_indent = par.second_indent
+ cur_width = par.second_width
+ elseif parshape then
+ local shape = parshape[current_line]
+ cur_indent = shape[1]
+ cur_width = shape[2]
+ else
+ cur_indent = par.first_indent
+ cur_width = par.first_width
+ end
+
+ if adapt_width then -- extension
+ local l, r = adapt_width(par,current_line)
+ cur_indent = cur_indent + l
+ cur_width = cur_width - l - r
+ end
+
+ statistics.noflines = statistics.noflines + 1
+ local finished_line = nil
+ if adjust_spacing > 0 then
+ statistics.nofadjustedlines = statistics.nofadjustedlines + 1
+ finished_line = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
+ else
+ finished_line = xpack_nodes(q,cur_width,"exactly",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
+ end
+ if protrude_chars > 0 then
+ statistics.nofprotrudedlines = statistics.nofprotrudedlines + 1
+ end
+ -- wrong:
+ local adjust_head = texlists.adjust_head
+ local pre_adjust_head = texlists.pre_adjust_head
+ --
+ setfield(finished_line,"shift",cur_indent)
+ -- this will probably go away:
+ if par.each_line_height ~= ignored_dimen then
+ setfield(finished_line,"height",par.each_line_height)
+ end
+ if par.each_line_depth ~= ignored_dimen then
+ setfield(finished_line,"depth",par.each_line_depth)
+ end
+ if par.first_line_height ~= ignored_dimen and (current_line == par.first_line + 1) then
+ setfield(finished_line,"height",par.first_line_height)
+ end
+ if par.last_line_depth ~= ignored_dimen and current_line + 1 == par.best_line then
+ setfield(finished_line,"depth",par.last_line_depth)
+ end
+ --
+ if texlists.pre_adjust_head ~= pre_adjust_head then
+ append_list(par, texlists.pre_adjust_head)
+ texlists.pre_adjust_head = pre_adjust_head
+ end
+ append_to_vlist(par,finished_line)
+ if texlists.adjust_head ~= adjust_head then
+ append_list(par, texlists.adjust_head)
+ texlists.adjust_head = adjust_head
+ end
+ --
+ local pen
+ if current_line + 1 ~= par.best_line then
+ if current_break.passive_pen_inter then
+ pen = current_break.passive_pen_inter
+ else
+ pen = par.inter_line_penalty
+ end
+ if current_line == prevgraf + 1 then
+ pen = pen + par.club_penalty
+ end
+ if current_line + 2 == par.best_line then
+ if par.display then
+ pen = pen + par.display_widow_penalty
+ else
+ pen = pen + par.widow_penalty
+ end
+ end
+ if disc_break then
+ if current_break.passive_pen_broken ~= 0 then
+ pen = pen + current_break.passive_pen_broken
+ else
+ pen = pen + par.broken_penalty
+ end
+ end
+ if pen ~= 0 then
+ append_to_vlist(par,new_penalty(pen))
+ end
+ end
+ current_line = current_line + 1
+ current_break = current_break.prev_break
+ if current_break and not post_disc_break then
+ local current = head
+ local next = nil
+ while true do
+ next = getnext(current)
+ if next == current_break.cur_break or getid(next) == glyph_code then
+ break
+ end
+ local id = getid(next)
+ local subtype = getsubtype(next)
+ if id == whatsit_code and subtype == localpar_code then
+ -- nothing
+ elseif id < math_code then
+ -- messy criterium
+ break
+ elseif id == math_code then
+ -- keep the math node
+ setfield(next,"surround",0)
+ break
+ elseif id == kern_code and (subtype ~= userkern_code and not getattr(next,a_fontkern)) then
+ -- fontkerns and accent kerns as well as otf injections
+ break
+ end
+ current = next
+ end
+ if current ~= head then
+ setfield(current,"next",nil)
+ flush_nodelist(getnext(head))
+ setfield(head,"next",next)
+ if next then
+ setfield(next,"prev",head)
+ end
+ end
+ end
+ end
+ -- if current_line ~= par.best_line then
+ -- report_parbuilders("line breaking")
+ -- end
+ par.head = nil -- needs checking
+ current_line = current_line - 1
+ if trace_basic then
+ report_parbuilders("paragraph broken into %a lines",current_line)
+ end
+ texnest[texnest.ptr].prevgraf = current_line
+end
+
+local function wrap_up(par)
+ if par.tracing_paragraphs then
+ diagnostics.stop()
+ end
+ if par.do_last_line_fit then
+ local best_bet = par.best_bet
+ local active_short = best_bet.active_short
+ local active_glue = best_bet.active_glue
+ if active_short == 0 then
+ if trace_lastlinefit then
+ report_parbuilders("disabling last line fit, no active_short")
+ end
+ par.do_last_line_fit = false
+ else
+ local glue = par.final_par_glue
+ local spec = copy_node(getfield(glue,"spec"))
+ setfield(spec,"width",getfield(spec,"width") + active_short - active_glue)
+ setfield(spec,"stretch",0)
+ -- flush_node(getfield(glue,"spec")) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
+ setfield(glue,"spec",spec)
+ if trace_lastlinefit then
+ report_parbuilders("applying last line fit, short %a, glue %p",active_short,active_glue)
+ end
+ end
+ end
+ -- we have a bunch of glue and temp nodes not freed
+ local head = par.head
+ if getid(head) == temp_code then
+ par.head = getnext(head)
+ flush_node(head)
+ end
+ post_line_break(par)
+ reset_meta(par)
+ register_statistics(par)
+ return par.head_field
+end
+
+-- we could do active nodes differently ... table instead of linked list or a list
+-- with prev nodes but it doesn't save much (as we still need to keep indices then
+-- in next)
+
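For what it's worth, the alternative hinted at in the comment above would keep the active candidates in a Lua array with explicit indices rather than in a linked list of tables. A very rough, hypothetical sketch (names invented here, and not what the builder below does):

-- hypothetical array-based active list, only to illustrate the alternative mentioned above
local actives = { }
local function append_active(a)
    actives[#actives+1] = a
end
local function deactivate_active(i)
    table.remove(actives,i) -- keeps the array dense but shifts the tail, so indices held elsewhere must follow
end
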
+local function deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) -- no need for adjust if disabled
+ local active = par.active
+ local active_width = par.active_width
+ prev_r.next = r.next
+ -- removes r
+ -- r = nil
+ if prev_r == active then
+ r = active.next
+ if r.id == delta_code then
+ local aw = active_width.size + r.size active_width.size = aw cur_active_width.size = aw
+ local aw = active_width.stretch + r.stretch active_width.stretch = aw cur_active_width.stretch = aw
+ local aw = active_width.fi + r.fi active_width.fi = aw cur_active_width.fi = aw
+ local aw = active_width.fil + r.fil active_width.fil = aw cur_active_width.fil = aw
+ local aw = active_width.fill + r.fill active_width.fill = aw cur_active_width.fill = aw
+ local aw = active_width.filll + r.filll active_width.filll = aw cur_active_width.filll = aw
+ local aw = active_width.shrink + r.shrink active_width.shrink = aw cur_active_width.shrink = aw
+ if checked_expansion then
+ local aw = active_width.adjust_stretch + r.adjust_stretch active_width.adjust_stretch = aw cur_active_width.adjust_stretch = aw
+ local aw = active_width.adjust_shrink + r.adjust_shrink active_width.adjust_shrink = aw cur_active_width.adjust_shrink = aw
+ end
+ active.next = r.next
+ -- removes r
+ -- r = nil
+ end
+ elseif prev_r.id == delta_code then
+ r = prev_r.next
+ if r == active then
+ cur_active_width.size = cur_active_width.size - prev_r.size
+ cur_active_width.stretch = cur_active_width.stretch - prev_r.stretch
+ cur_active_width.fi = cur_active_width.fi - prev_r.fi
+ cur_active_width.fil = cur_active_width.fil - prev_r.fil
+ cur_active_width.fill = cur_active_width.fill - prev_r.fill
+ cur_active_width.filll = cur_active_width.filll - prev_r.filll
+ cur_active_width.shrink = cur_active_width.shrink - prev_r.shrink
+ if checked_expansion then
+ cur_active_width.adjust_stretch = cur_active_width.adjust_stretch - prev_r.adjust_stretch
+ cur_active_width.adjust_shrink = cur_active_width.adjust_shrink - prev_r.adjust_shrink
+ end
+ prev_prev_r.next = active
+ -- removes prev_r
+ -- prev_r = nil
+ prev_r = prev_prev_r
+ elseif r.id == delta_code then
+ local rn = r.size cur_active_width.size = cur_active_width.size + rn prev_r.size = prev_r.size + rn
+ local rn = r.stretch cur_active_width.stretch = cur_active_width.stretch + rn prev_r.stretch = prev_r.stretch + rn
+ local rn = r.fi cur_active_width.fi = cur_active_width.fi + rn prev_r.fi = prev_r.fi + rn
+ local rn = r.fil cur_active_width.fil = cur_active_width.fil + rn prev_r.fil = prev_r.fil + rn
+ local rn = r.fill cur_active_width.fill = cur_active_width.fill + rn prev_r.fill = prev_r.fill + rn
+ local rn = r.filll cur_active_width.filll = cur_active_width.filll + rn prev_r.filll = prev_r.filll + rn
+ local rn = r.shrink cur_active_width.shrink = cur_active_width.shrink + rn prev_r.shrink = prev_r.shrink + rn
+ if checked_expansion then
+ local rn = r.adjust_stretch cur_active_width.adjust_stretch = cur_active_width.adjust_stretch + rn prev_r.adjust_stretch = prev_r.adjust_stretch + rn
+ local rn = r.adjust_shrink cur_active_width.adjust_shrink = cur_active_width.adjust_shrink + rn prev_r.adjust_shrink = prev_r.adjust_shrink + rn
+ end
+ prev_r.next = r.next
+ -- removes r
+ -- r = nil
+ end
+ end
+ return prev_r, r
+end
+
+local function lastlinecrap(shortfall,active_short,active_glue,cur_active_width,fill_width,last_line_fit)
+ if active_short == 0 or active_glue <= 0 then
+ return false, 0, fit_decent_class, 0, 0
+ end
+ if cur_active_width.fi ~= fill_width.fi or cur_active_width.fil ~= fill_width.fil or cur_active_width.fill ~= fill_width.fill or cur_active_width.filll ~= fill_width.filll then
+ return false, 0, fit_decent_class, 0, 0
+ end
+ local adjustment = active_short > 0 and cur_active_width.stretch or cur_active_width.shrink
+ if adjustment <= 0 then
+ return false, 0, fit_decent_class, adjustment, 0
+ end
+ adjustment = calculate_fraction(adjustment,active_short,active_glue,maxdimen)
+ if last_line_fit < 1000 then
+ adjustment = calculate_fraction(adjustment,last_line_fit,1000,maxdimen) -- uses previous adjustment
+ end
+ local fit_class = fit_decent_class
+ if adjustment > 0 then
+ local stretch = cur_active_width.stretch
+ if adjustment > shortfall then
+ adjustment = shortfall
+ end
+ if adjustment > 7230584 and stretch < 1663497 then
+ return true, fit_very_loose_class, shortfall, adjustment, infinite_badness
+ end
+ -- if adjustment == 0 then -- badness = 0
+ -- return true, shortfall, fit_decent_class, 0, 0
+ -- elseif stretch <= 0 then -- badness = 10000
+ -- return true, shortfall, fit_very_loose_class, adjustment, 10000
+ -- end
+ -- local badness = (adjustment == 0 and 0) or (stretch <= 0 and 10000) or calculate_badness(adjustment,stretch)
+ local badness = calculate_badness(adjustment,stretch)
+ if badness > 99 then
+ return true, shortfall, fit_very_loose_class, adjustment, badness
+ elseif badness > 12 then
+ return true, shortfall, fit_loose_class, adjustment, badness
+ else
+ return true, shortfall, fit_decent_class, adjustment, badness
+ end
+ elseif adjustment < 0 then
+ local shrink = cur_active_width.shrink
+ if -adjustment > shrink then
+ adjustment = -shrink
+ end
+ local badness = calculate_badness(-adjustment,shrink)
+ if badness > 12 then
+ return true, shortfall, fit_tight_class, adjustment, badness
+ else
+ return true, shortfall, fit_decent_class, adjustment, badness
+ end
+ else
+ return false, 0, fit_decent_class, 0, 0
+ end
+end
+
+-- todo: statistics .. count tries and so
+
+local trialcount = 0
+
+local function try_break(pi, break_type, par, first_p, current, checked_expansion)
+
+-- trialcount = trialcount + 1
+-- print(trialcount,pi,break_type,current,nuts.tostring(current))
+
+ if pi >= infinite_penalty then -- this breakpoint is inhibited by infinite penalty
+ local p_active = par.active
+ return p_active, p_active and p_active.next
+ elseif pi <= -infinite_penalty then -- this breakpoint will be forced
+ pi = eject_penalty
+ end
+
+ local prev_prev_r = nil -- a step behind prev_r, if type(prev_r)=delta_code
+ local prev_r = par.active -- stays a step behind r
+ local r = nil -- runs through the active list
+ local no_break_yet = true -- have we found a feasible break at current?
+ local node_r_stays_active = false -- should node r remain in the active list?
+ local line_width = 0 -- the current line will be justified to this width
+ local line_number = 0 -- line number of current active node
+ local old_line_number = 0 -- maximum line number in current equivalence class of lines
+
+ local protrude_chars = par.protrude_chars
+ local checked_expansion = par.checked_expansion
+ local break_width = par.break_width
+ local active_width = par.active_width
+ local background = par.background
+ local minimal_demerits = par.minimal_demerits
+ local best_place = par.best_place
+ local best_pl_line = par.best_pl_line
+ local best_pl_short = par.best_pl_short
+ local best_pl_glue = par.best_pl_glue
+ local do_last_line_fit = par.do_last_line_fit
+ local final_pass = par.final_pass
+ local tracing_paragraphs = par.tracing_paragraphs
+ -- local par_active = par.active
+
+ local adapt_width = par.adapt_width
+
+ local parshape = par.par_shape_ptr
+
+ local cur_active_width = checked_expansion and { -- distance from current active node
+ size = active_width.size,
+ stretch = active_width.stretch,
+ fi = active_width.fi,
+ fil = active_width.fil,
+ fill = active_width.fill,
+ filll = active_width.filll,
+ shrink = active_width.shrink,
+ adjust_stretch = active_width.adjust_stretch,
+ adjust_shrink = active_width.adjust_shrink,
+ } or {
+ size = active_width.size,
+ stretch = active_width.stretch,
+ fi = active_width.fi,
+ fil = active_width.fil,
+ fill = active_width.fill,
+ filll = active_width.filll,
+ shrink = active_width.shrink,
+ }
+
+ while true do
+ r = prev_r.next
+ if r.id == delta_code then
+ cur_active_width.size = cur_active_width.size + r.size
+ cur_active_width.stretch = cur_active_width.stretch + r.stretch
+ cur_active_width.fi = cur_active_width.fi + r.fi
+ cur_active_width.fil = cur_active_width.fil + r.fil
+ cur_active_width.fill = cur_active_width.fill + r.fill
+ cur_active_width.filll = cur_active_width.filll + r.filll
+ cur_active_width.shrink = cur_active_width.shrink + r.shrink
+ if checked_expansion then
+ cur_active_width.adjust_stretch = cur_active_width.adjust_stretch + r.adjust_stretch
+ cur_active_width.adjust_shrink = cur_active_width.adjust_shrink + r.adjust_shrink
+ end
+ prev_prev_r = prev_r
+ prev_r = r
+ else
+ line_number = r.line_number
+ if line_number > old_line_number then
+ local minimum_demerits = par.minimum_demerits
+ if minimum_demerits < awful_badness and (old_line_number ~= par.easy_line or r == par.active) then
+ if no_break_yet then
+ no_break_yet = false
+ break_width.size = background.size
+ break_width.stretch = background.stretch
+ break_width.fi = background.fi
+ break_width.fil = background.fil
+ break_width.fill = background.fill
+ break_width.filll = background.filll
+ break_width.shrink = background.shrink
+ if checked_expansion then
+ break_width.adjust_stretch = 0
+ break_width.adjust_shrink = 0
+ end
+ if current then
+ compute_break_width(par,break_type,current)
+ end
+ end
+ if prev_r.id == delta_code then
+ prev_r.size = prev_r.size - cur_active_width.size + break_width.size
+ prev_r.stretch = prev_r.stretch - cur_active_width.stretch + break_width.stretch
+ prev_r.fi = prev_r.fi - cur_active_width.fi + break_width.fi
+ prev_r.fil = prev_r.fil - cur_active_width.fil + break_width.fil
+ prev_r.fill = prev_r.fill - cur_active_width.fill + break_width.fill
+ prev_r.filll = prev_r.filll - cur_active_width.filll + break_width.filll
+ prev_r.shrink = prev_r.shrink - cur_active_width.shrink + break_width.shrink
+ if checked_expansion then
+ prev_r.adjust_stretch = prev_r.adjust_stretch - cur_active_width.adjust_stretch + break_width.adjust_stretch
+ prev_r.adjust_shrink = prev_r.adjust_shrink - cur_active_width.adjust_shrink + break_width.adjust_shrink
+ end
+ elseif prev_r == par.active then
+ active_width.size = break_width.size
+ active_width.stretch = break_width.stretch
+ active_width.fi = break_width.fi
+ active_width.fil = break_width.fil
+ active_width.fill = break_width.fill
+ active_width.filll = break_width.filll
+ active_width.shrink = break_width.shrink
+ if checked_expansion then
+ active_width.adjust_stretch = break_width.adjust_stretch
+ active_width.adjust_shrink = break_width.adjust_shrink
+ end
+ else
+ local q = checked_expansion and {
+ id = delta_code,
+ subtype = nosubtype_code,
+ next = r,
+ size = break_width.size - cur_active_width.size,
+ stretch = break_width.stretch - cur_active_width.stretch,
+ fi = break_width.fi - cur_active_width.fi,
+ fil = break_width.fil - cur_active_width.fil,
+ fill = break_width.fill - cur_active_width.fill,
+ filll = break_width.filll - cur_active_width.filll,
+ shrink = break_width.shrink - cur_active_width.shrink,
+ adjust_stretch = break_width.adjust_stretch - cur_active_width.adjust_stretch,
+ adjust_shrink = break_width.adjust_shrink - cur_active_width.adjust_shrink,
+ } or {
+ id = delta_code,
+ subtype = nosubtype_code,
+ next = r,
+ size = break_width.size - cur_active_width.size,
+ stretch = break_width.stretch - cur_active_width.stretch,
+ fi = break_width.fi - cur_active_width.fi,
+ fil = break_width.fil - cur_active_width.fil,
+ fill = break_width.fill - cur_active_width.fill,
+ filll = break_width.filll - cur_active_width.filll,
+ shrink = break_width.shrink - cur_active_width.shrink,
+ }
+ prev_r.next = q
+ prev_prev_r = prev_r
+ prev_r = q
+ end
+ local adj_demerits = par.adj_demerits
+ local abs_adj_demerits = adj_demerits > 0 and adj_demerits or -adj_demerits
+ if abs_adj_demerits >= awful_badness - minimum_demerits then
+ minimum_demerits = awful_badness - 1
+ else
+ minimum_demerits = minimum_demerits + abs_adj_demerits
+ end
+ for fit_class = fit_very_loose_class, fit_tight_class do
+ if minimal_demerits[fit_class] <= minimum_demerits then
+ -- insert a new active node from best_place[fit_class] to current
+ par.pass_number = par.pass_number + 1
+ local prev_break = best_place[fit_class]
+ local passive = {
+ id = passive_code,
+ subtype = nosubtype_code,
+ next = par.passive,
+ cur_break = current,
+ serial = par.pass_number,
+ prev_break = prev_break,
+ passive_pen_inter = par.internal_pen_inter,
+ passive_pen_broken = par.internal_pen_broken,
+ passive_last_left_box = par.internal_left_box,
+ passive_last_left_box_width = par.internal_left_box_width,
+ passive_left_box = prev_break and prev_break.passive_last_left_box or par.init_internal_left_box,
+ passive_left_box_width = prev_break and prev_break.passive_last_left_box_width or par.init_internal_left_box_width,
+ passive_right_box = par.internal_right_box,
+ passive_right_box_width = par.internal_right_box_width,
+-- analysis = table.fastcopy(cur_active_width),
+ }
+ par.passive = passive
+ local q = {
+ id = break_type,
+ subtype = fit_class,
+ break_node = passive,
+ line_number = best_pl_line[fit_class] + 1,
+ total_demerits = minimal_demerits[fit_class], -- or 0,
+ next = r,
+ }
+ if do_last_line_fit then
+ local active_short = best_pl_short[fit_class]
+ local active_glue = best_pl_glue[fit_class]
+ q.active_short = active_short
+ q.active_glue = active_glue
+ if trace_lastlinefit then
+ report_parbuilders("setting short to %i and glue to %p using class %a",active_short,active_glue,fit_class)
+ end
+ end
+ -- q.next = r -- already done
+ prev_r.next = q
+ prev_r = q
+ if tracing_paragraphs then
+ diagnostics.break_node(par,q,fit_class,break_type,current)
+ end
+ end
+ minimal_demerits[fit_class] = awful_badness
+ end
+ par.minimum_demerits = awful_badness
+ if r ~= par.active then
+ local q = checked_expansion and {
+ id = delta_code,
+ subtype = nosubtype_code,
+ next = r,
+ size = cur_active_width.size - break_width.size,
+ stretch = cur_active_width.stretch - break_width.stretch,
+ fi = cur_active_width.fi - break_width.fi,
+ fil = cur_active_width.fil - break_width.fil,
+ fill = cur_active_width.fill - break_width.fill,
+ filll = cur_active_width.filll - break_width.filll,
+ shrink = cur_active_width.shrink - break_width.shrink,
+ adjust_stretch = cur_active_width.adjust_stretch - break_width.adjust_stretch,
+ adjust_shrink = cur_active_width.adjust_shrink - break_width.adjust_shrink,
+ } or {
+ id = delta_code,
+ subtype = nosubtype_code,
+ next = r,
+ size = cur_active_width.size - break_width.size,
+ stretch = cur_active_width.stretch - break_width.stretch,
+ fi = cur_active_width.fi - break_width.fi,
+ fil = cur_active_width.fil - break_width.fil,
+ fill = cur_active_width.fill - break_width.fill,
+ filll = cur_active_width.filll - break_width.filll,
+ shrink = cur_active_width.shrink - break_width.shrink,
+ }
+ -- q.next = r -- already done
+ prev_r.next = q
+ prev_prev_r = prev_r
+ prev_r = q
+ end
+ end
+ if r == par.active then
+ return r, r and r.next -- p_active, n_active
+ end
+ if line_number > par.easy_line then
+ old_line_number = max_halfword - 1
+ line_width = par.second_width
+ else
+ old_line_number = line_number
+ if line_number > par.last_special_line then
+ line_width = par.second_width
+ elseif parshape then
+ line_width = parshape[line_number][2]
+ else
+ line_width = par.first_width
+ end
+ end
+ if adapt_width then
+ local l, r = adapt_width(par,line_number)
+ line_width = line_width - l - r
+ end
+ end
+ local artificial_demerits = false -- has d been forced to zero
+ local shortfall = line_width - cur_active_width.size - par.internal_right_box_width -- used in badness calculations
+ if not r.break_node then
+ shortfall = shortfall - par.init_internal_left_box_width
+ else
+ shortfall = shortfall - (r.break_node.passive_last_left_box_width or 0)
+ end
+ local pw, lp, rp -- used later on
+ if protrude_chars > 1 then
+ -- this is quite time consuming
+ local b = r.break_node
+ local l = b and b.cur_break or first_p
+ local o = current and getprev(current)
+ if current and getid(current) == disc_code and getfield(current,"pre") then
+ o = find_tail(getfield(current,"pre"))
+ else
+ o = find_protchar_right(l,o)
+ end
+ if o and getid(o) == glyph_code then
+ pw, rp = right_pw(o)
+ shortfall = shortfall + pw
+ end
+ local id = getid(l)
+ if id == glyph_code then
+ -- ok ?
+ elseif id == disc_code and getfield(l,"post") then
+ l = getfield(l,"post") -- TODO: first char could be a disc
+ else
+ l = find_protchar_left(l)
+ end
+ if l and getid(l) == glyph_code then
+ pw, lp = left_pw(l)
+ shortfall = shortfall + pw
+ end
+ end
+ if checked_expansion and shortfall ~= 0 then
+ local margin_kern_stretch = 0
+ local margin_kern_shrink = 0
+ if protrude_chars > 1 then
+ if lp then
+ local data = expansions[getfont(lp)][getchar(lp)]
+ if data then
+ margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
+ end
+ end
+ if rp then
+ local data = expansions[getfont(rp)][getchar(rp)]
+ if data then
+ margin_kern_stretch = margin_kern_stretch + data.glyphstretch
+ margin_kern_shrink = margin_kern_shrink + data.glyphshrink
+ end
+ end
+ end
+ local total = cur_active_width.adjust_stretch + margin_kern_stretch
+ if shortfall > 0 and total > 0 then
+ if total > shortfall then
+ shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2
+ else
+ shortfall = shortfall - total
+ end
+ else
+ total = cur_active_width.adjust_shrink + margin_kern_shrink
+ if shortfall < 0 and total > 0 then
+ if total > - shortfall then
+ shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2
+ else
+ shortfall = shortfall + total
+ end
+ end
+ end
+ end
+ local b = 0
+ local g = 0
+ local fit_class = fit_decent_class
+ local found = false
+ if shortfall > 0 then
+ if cur_active_width.fi ~= 0 or cur_active_width.fil ~= 0 or cur_active_width.fill ~= 0 or cur_active_width.filll ~= 0 then
+ if not do_last_line_fit then
+ -- okay
+ elseif not current then
+ found, shortfall, fit_class, g, b = lastlinecrap(shortfall,r.active_short,r.active_glue,cur_active_width,par.fill_width,par.last_line_fit)
+ else
+ shortfall = 0
+ end
+ else
+ local stretch = cur_active_width.stretch
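+ -- the two magic constants below come from tex's badness function: for a shortfall
+ -- above 7230584 combined with a stretch below 1663497 the badness ends up being
+ -- inf_bad anyway, so we skip the real calculation (and avoid overflow)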
+ if shortfall > 7230584 and stretch < 1663497 then
+ b = infinite_badness
+ fit_class = fit_very_loose_class
+ else
+ b = calculate_badness(shortfall,stretch)
+ if b > 99 then
+ fit_class = fit_very_loose_class
+ elseif b > 12 then
+ fit_class = fit_loose_class
+ else
+ fit_class = fit_decent_class
+ end
+ end
+ end
+ else
+ local shrink = cur_active_width.shrink
+ if -shortfall > shrink then
+ b = infinite_badness + 1
+ else
+ b = calculate_badness(-shortfall,shrink)
+ end
+ if b > 12 then
+ fit_class = fit_tight_class
+ else
+ fit_class = fit_decent_class
+ end
+ end
+ if do_last_line_fit and not found then
+ if not current then
+ -- g = 0
+ shortfall = 0
+ elseif shortfall > 0 then
+ g = cur_active_width.stretch
+ elseif shortfall < 0 then
+ g = cur_active_width.shrink
+ else
+ g = 0
+ end
+ end
+ -- ::FOUND::
+ local continue_only = false -- brrr
+ if b > infinite_badness or pi == eject_penalty then
+ if final_pass and par.minimum_demerits == awful_badness and r.next == par.active and prev_r == par.active then
+ artificial_demerits = true -- set demerits zero, this break is forced
+ node_r_stays_active = false
+ elseif b > par.threshold then
+ prev_r, r = deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion)
+ continue_only = true
+ else
+ node_r_stays_active = false
+ end
+ else
+ prev_r = r
+ if b > par.threshold then
+ continue_only = true
+ else
+ node_r_stays_active = true
+ end
+ end
+ if not continue_only then
+ local d = 0
+ if not artificial_demerits then
+ d = par.line_penalty + b
+ if (d >= 0 and d or -d) >= 10000 then -- abs(d)
+ d = 100000000
+ else
+ d = d * d
+ end
+ if pi == 0 then
+ -- nothing
+ elseif pi > 0 then
+ d = d + pi * pi
+ elseif pi > eject_penalty then
+ d = d - pi * pi
+ end
+ if break_type == hyphenated_code and r.id == hyphenated_code then
+ if current then
+ d = d + par.double_hyphen_demerits
+ else
+ d = d + par.final_hyphen_demerits
+ end
+ end
+ local delta = fit_class - r.subtype
+ if (delta >= 0 and delta or -delta) > 1 then -- abs(delta)
+ d = d + par.adj_demerits
+ end
+ end
+ if tracing_paragraphs then
+ diagnostics.feasible_break(par,current,r,b,pi,d,artificial_demerits)
+ end
+ d = d + r.total_demerits -- this is the minimum total demerits from the beginning to current via r
+ if d <= minimal_demerits[fit_class] then
+ minimal_demerits[fit_class] = d
+ best_place [fit_class] = r.break_node
+ best_pl_line [fit_class] = line_number
+ if do_last_line_fit then
+ best_pl_short[fit_class] = shortfall
+ best_pl_glue [fit_class] = g
+ if trace_lastlinefit then
+ report_parbuilders("storing last line fit short %a and glue %p in class %a",shortfall,g,fit_class)
+ end
+ end
+ if d < par.minimum_demerits then
+ par.minimum_demerits = d
+ end
+ end
+ if not node_r_stays_active then
+ prev_r, r = deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion)
+ end
+ end
+ end
+ end
+end
+
+-- we can call the normal one for simple box building in the otr so we need
+-- frequent enabling/disabling
+
+local dcolor = { [0] = "red", "green", "blue", "magenta", "cyan", "gray" }
+
+local temp_head = new_temp()
+
+function constructors.methods.basic(head,d)
+ head = tonut(head)
+
+ if trace_basic then
+ report_parbuilders("starting at %a",head)
+ end
+
+ local par = initialize_line_break(head,d)
+
+ local checked_expansion = par.checked_expansion
+ local active_width = par.active_width
+ local disc_width = par.disc_width
+ local background = par.background
+ local tracing_paragraphs = par.tracing_paragraphs
+
+ local dirstack = new_dir_stack()
+
+ if tracing_paragraphs then
+ diagnostics.start()
+ if par.pretolerance >= 0 then
+ diagnostics.current_pass(par,"firstpass")
+ end
+ end
+
+ while true do
+ reset_meta(par)
+ if par.threshold > infinite_badness then
+ par.threshold = infinite_badness
+ end
+ par.active.next = {
+ id = unhyphenated_code,
+ subtype = fit_decent_class,
+ next = par.active,
+ break_node = nil,
+ line_number = par.first_line + 1,
+ total_demerits = 0,
+ active_short = 0,
+ active_glue = 0,
+ }
+ active_width.size = background.size
+ active_width.stretch = background.stretch
+ active_width.fi = background.fi
+ active_width.fil = background.fil
+ active_width.fill = background.fill
+ active_width.filll = background.filll
+ active_width.shrink = background.shrink
+
+ if checked_expansion then
+ active_width.adjust_stretch = 0
+ active_width.adjust_shrink = 0
+ end
+
+ par.passive = nil -- = 0
+ par.printed_node = temp_head -- only when tracing, shared
+ par.pass_number = 0
+-- par.auto_breaking = true
+
+ setfield(temp_head,"next",head)
+
+ local current = head
+ local first_p = current
+
+ local auto_breaking = true
+
+ par.font_in_short_display = 0
+
+ if current and getid(current) == whatsit_code and getsubtype(current) == localpar_code then
+ par.init_internal_left_box = getfield(current,"box_left")
+ par.init_internal_left_box_width = getfield(current,"box_left_width")
+ par.internal_pen_inter = getfield(current,"pen_inter")
+ par.internal_pen_broken = getfield(current,"pen_broken")
+ par.internal_left_box = par.init_internal_left_box
+ par.internal_left_box_width = par.init_internal_left_box_width
+ par.internal_right_box = getfield(current,"box_right")
+ par.internal_right_box_width = getfield(current,"box_right_width")
+ end
+
+ -- all passes are combined in this loop so maybe we should split this into
+ -- three function calls; we then also need to do the wrap_up elsewhere
+
+ -- split into normal and expansion loop
+
+ -- use an active local
+
+ local fontexps, lastfont -- we can pass fontexps to calculate width if needed
+
+ -- i flattened the inner loop over glyphs .. it looks nicer and the extra p_active ~= n_active
+ -- test is fast enough (and try_break now returns the updated values); the kern helper has been
+ -- inlined as it did a double check on id so in fact we had hardly any code to share
+
+ local p_active = par.active
+ local n_active = p_active and p_active.next
+ local second_pass = par.second_pass
+
+ trialcount = 0
+
+ while current and p_active ~= n_active do
+ local id = getid(current)
+ if id == glyph_code then
+ if is_rotated[par.line_break_dir] then
+ active_width.size = active_width.size + getfield(current,"height") + getfield(current,"depth")
+ else
+ active_width.size = active_width.size + getfield(current,"width")
+ end
+ if checked_expansion then
+ local currentfont = getfont(current)
+ local data = checked_expansion[currentfont]
+ if data then
+ if currentfont ~= lastfont then
+ fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
+ lastfont = currentfont
+ end
+ if fontexps then
+ local expansion = fontexps[getchar(current)]
+ if expansion then
+ active_width.adjust_stretch = active_width.adjust_stretch + expansion.glyphstretch
+ active_width.adjust_shrink = active_width.adjust_shrink + expansion.glyphshrink
+ end
+ end
+ end
+ end
+ elseif id == hlist_code or id == vlist_code then
+ if is_parallel[getfield(current,"dir")][par.line_break_dir] then
+ active_width.size = active_width.size + getfield(current,"width")
+ else
+ active_width.size = active_width.size + getfield(current,"depth") + getfield(current,"height")
+ end
+ elseif id == glue_code then
+-- if par.auto_breaking then
+ if auto_breaking then
+ local prev_p = getprev(current)
+ if prev_p and prev_p ~= temp_head then
+ local id = getid(prev_p)
+ if id == glyph_code or
+ (id < math_code and (id ~= whatsit_code or getsubtype(prev_p) ~= dir_code)) or -- was: precedes_break(prev_p)
+ (id == kern_code and getsubtype(prev_p) ~= userkern_code) then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ end
+ end
+ end
+ local spec = check_shrinkage(par,getfield(current,"spec"))
+ local order = stretch_orders[getfield(spec,"stretch_order")]
+ setfield(current,"spec",spec)
+ active_width.size = active_width.size + getfield(spec,"width")
+ active_width[order] = active_width[order] + getfield(spec,"stretch")
+ active_width.shrink = active_width.shrink + getfield(spec,"shrink")
+ elseif id == disc_code then
+ local subtype = getsubtype(current)
+ if subtype ~= second_disc_code then
+ local line_break_dir = par.line_break_dir
+ if second_pass or subtype <= automatic_disc_code then
+ local actual_pen = subtype == automatic_disc_code and par.ex_hyphen_penalty or par.hyphen_penalty
+ local pre = getfield(current,"pre")
+ if not pre then -- trivial pre-break
+ disc_width.size = 0
+ if checked_expansion then
+ disc_width.adjust_stretch = 0
+ disc_width.adjust_shrink = 0
+ end
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
+ else
+ local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
+ disc_width.size = size
+ active_width.size = active_width.size + size
+ if checked_expansion then
+ disc_width.adjust_stretch = adjust_stretch
+ disc_width.adjust_shrink = adjust_shrink
+ active_width.adjust_stretch = active_width.adjust_stretch + adjust_stretch
+ active_width.adjust_shrink = active_width.adjust_shrink + adjust_shrink
+ else
+ -- disc_width.adjust_stretch = 0
+ -- disc_width.adjust_shrink = 0
+ end
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
+ if subtype == first_disc_code then
+ local cur_p_next = getnext(current)
+ if getid(cur_p_next) ~= disc_code or getsubtype(cur_p_next) ~= second_disc_code then
+ report_parbuilders("unsupported disc at location %a",1)
+ else
+ local pre = getfield(cur_p_next,"pre")
+ if pre then
+ local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
+ disc_width.size = disc_width.size + size
+ if checked_expansion then
+ disc_width.adjust_stretch = disc_width.adjust_stretch + adjust_stretch
+ disc_width.adjust_shrink = disc_width.adjust_shrink + adjust_shrink
+ end
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
+ --
+ -- I will look into this some day ... comment in linebreak.w says that this fails,
+ -- maybe this is what Taco means with his comment in the luatex manual.
+ --
+ -- do_one_seven_eight(sub_disc_width_from_active_width);
+ -- do_one_seven_eight(reset_disc_width);
+ -- s = vlink_no_break(vlink(current));
+ -- add_to_widths(s, line_break_dir, pdf_adjust_spacing,disc_width);
+ -- ext_try_break(...,first_p,vlink(current));
+ --
+ else
+ report_parbuilders("unsupported disc at location %a",2)
+ end
+ end
+ end
+ -- beware, we cannot restore to a saved value as the try_break adapts active_width
+ active_width.size = active_width.size - disc_width.size
+ if checked_expansion then
+ active_width.adjust_stretch = active_width.adjust_stretch - disc_width.adjust_stretch
+ active_width.adjust_shrink = active_width.adjust_shrink - disc_width.adjust_shrink
+ end
+ end
+ end
+ local replace = getfield(current,"replace")
+ if replace then
+ local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
+ active_width.size = active_width.size + size
+ if checked_expansion then
+ active_width.adjust_stretch = active_width.adjust_stretch + adjust_stretch
+ active_width.adjust_shrink = active_width.adjust_shrink + adjust_shrink
+ end
+ end
+ end
+ elseif id == kern_code then
+ if getsubtype(current) == userkern_code then
+ local v = getnext(current)
+-- if par.auto_breaking and getid(v) == glue_code then
+ if auto_breaking and getid(v) == glue_code then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ end
+ local active_width = par.active_width
+ active_width.size = active_width.size + getfield(current,"kern")
+ else
+ local kern = getfield(current,"kern")
+ if kern ~= 0 then
+ active_width.size = active_width.size + kern
+ if checked_expansion and expand_kerns and (getsubtype(current) == kerning_code or getattr(current,a_fontkern)) then
+ local stretch, shrink = kern_stretch_shrink(current,kern)
+ if expand_kerns == "stretch" then
+ active_width.adjust_stretch = active_width.adjust_stretch + stretch
+ elseif expand_kerns == "shrink" then
+ active_width.adjust_shrink = active_width.adjust_shrink + shrink
+ else
+ active_width.adjust_stretch = active_width.adjust_stretch + stretch
+ active_width.adjust_shrink = active_width.adjust_shrink + shrink
+ end
+ end
+ end
+ end
+ elseif id == math_code then
+-- par.auto_breaking = getsubtype(current) == endmath_code
+ auto_breaking = getsubtype(current) == endmath_code
+ local v = getnext(current)
+-- if par.auto_breaking and getid(v) == glue_code then
+ if auto_breaking and getid(v) == glue_code then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ end
+ local active_width = par.active_width
+ active_width.size = active_width.size + getfield(current,"surround")
+ elseif id == rule_code then
+ active_width.size = active_width.size + getfield(current,"width")
+ elseif id == penalty_code then
+ p_active, n_active = try_break(getfield(current,"penalty"), unhyphenated_code, par, first_p, current, checked_expansion)
+ elseif id == whatsit_code then
+ local subtype = getsubtype(current)
+ if subtype == localpar_code then
+ par.internal_pen_inter = getfield(current,"pen_inter")
+ par.internal_pen_broken = getfield(current,"pen_broken")
+ par.internal_left_box = getfield(current,"box_left")
+ par.internal_left_box_width = getfield(current,"box_left_width")
+ par.internal_right_box = getfield(current,"box_right")
+ par.internal_right_box_width = getfield(current,"box_right_width")
+ elseif subtype == dir_code then
+ par.line_break_dir = checked_line_dir(dirstack) or par.line_break_dir
+ else
+ local get_width = get_whatsit_width[subtype]
+ if get_width then
+ active_width.size = active_width.size + get_width(current,par.line_break_dir)
+ end
+ end
+ elseif trace_unsupported then
+ if id == mark_code or id == ins_code or id == adjust_code then
+ -- skip
+ else
+ report_parbuilders("node of type %a found in paragraph",type(id))
+ end
+ end
+ current = getnext(current)
+ end
+ if not current then
+ local p_active, n_active = try_break(eject_penalty, hyphenated_code, par, first_p, current, checked_expansion)
+ if n_active ~= p_active then
+ local r = n_active
+ par.fewest_demerits = awful_badness
+ repeat -- use local d
+ if r.id ~= delta_code and r.total_demerits < par.fewest_demerits then
+ par.fewest_demerits = r.total_demerits
+ par.best_bet = r
+ end
+ r = r.next
+ until r == p_active
+ par.best_line = par.best_bet.line_number
+ local asked_looseness = par.looseness
+ if asked_looseness == 0 then
+ return tonode(wrap_up(par))
+ end
+ local r = n_active
+ local actual_looseness = 0
+ -- minimize assignments to par but happens seldom
+ repeat
+ if r.id ~= delta_code then
+ local line_diff = r.line_number - par.best_line
+ par.line_diff = line_diff
+ if (line_diff < actual_looseness and asked_looseness <= line_diff) or
+ (line_diff > actual_looseness and asked_looseness >= line_diff) then
+ par.best_bet = r
+ actual_looseness = line_diff
+ par.fewest_demerits = r.total_demerits
+ elseif line_diff == actual_looseness and r.total_demerits < par.fewest_demerits then
+ par.best_bet = r
+ par.fewest_demerits = r.total_demerits
+ end
+ end
+ r = r.next
+ until r == p_active
+ par.best_line = par.best_bet.line_number
+ if actual_looseness == asked_looseness or par.final_pass then
+ return tonode(wrap_up(par))
+ end
+ end
+ end
+ reset_meta(par) -- clean up the memory by removing the break nodes
+ if not second_pass then
+ if tracing_paragraphs then
+ diagnostics.current_pass(par,"secondpass")
+ end
+ par.threshold = par.tolerance
+ par.second_pass = true
+ par.final_pass = par.emergency_stretch <= 0
+ else
+ if tracing_paragraphs then
+ diagnostics.current_pass(par,"emergencypass")
+ end
+ par.background.stretch = par.background.stretch + par.emergency_stretch
+ par.final_pass = true
+ end
+ end
+ return tonode(wrap_up(par))
+end
+
+-- standard tex logging .. will be adapted ..
+
+local function write_esc(cs)
+ local esc = tex.escapechar
+ if esc then
+ write("log",utfchar(esc),cs)
+ else
+ write("log",cs)
+ end
+end
+
+function diagnostics.start()
+end
+
+function diagnostics.stop()
+ write_nl("log",'')
+end
+
+function diagnostics.current_pass(par,what)
+ write_nl("log",format("@%s",what))
+end
+
+local verbose = false -- true
+
+local function short_display(target,a,font_in_short_display)
+ while a do
+ local id = getid(a)
+ if id == glyph_code then
+ local font = getfont(a)
+ if font ~= font_in_short_display then
+ write(target,tex.fontidentifier(font) .. ' ')
+ font_in_short_display = font
+ end
+ if getsubtype(a) == ligature_code then
+ font_in_short_display = short_display(target,getfield(a,"components"),font_in_short_display)
+ else
+ write(target,utfchar(getchar(a)))
+ end
+ elseif id == disc_code then
+ font_in_short_display = short_display(target,getfield(a,"pre"),font_in_short_display)
+ font_in_short_display = short_display(target,getfield(a,"post"),font_in_short_display)
+ elseif verbose then
+ write(target,format("[%s]",nodecodes[id]))
+ elseif id == rule_code then
+ write(target,"|")
+ elseif id == glue_code then
+ if getfield(getfield(a,"spec"),"writable") then
+ write(target," ")
+ end
+ elseif id == kern_code and (getsubtype(a) == userkern_code or getattr(a,a_fontkern)) then
+ if verbose then
+ write(target,"[|]")
+ else
+ write(target,"")
+ end
+ elseif id == math_code then
+ write(target,"$")
+ else
+ write(target,"[]")
+ end
+ a = getnext(a)
+ end
+ return font_in_short_display
+end
+
+diagnostics.short_display = short_display
+
+function diagnostics.break_node(par, q, fit_class, break_type, current) -- %d ?
+ local passive = par.passive
+ local typ_ind = break_type == hyphenated_code and '-' or ""
+ if par.do_last_line_fit then
+ local s = number.toscaled(q.active_short)
+ local g = number.toscaled(q.active_glue)
+ if current then
+ write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s g=%s",
+ passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g))
+ else
+ write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s a=%s",
+ passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g))
+ end
+ else
+ write_nl("log",format("@@%d: line %d.%d%s t=%s",
+ passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits))
+ end
+ if not passive.prev_break then
+ write("log"," -> @0")
+ else
+ write("log",format(" -> @%d", passive.prev_break.serial or 0))
+ end
+end
+
+function diagnostics.feasible_break(par, current, r, b, pi, d, artificial_demerits)
+ local printed_node = par.printed_node
+ if printed_node ~= current then
+ write_nl("log","")
+ if not current then
+ par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
+ else
+ local save_link = getnext(current)
+ setfield(cur_p,"next",nil)
+ write_nl("log","")
+ par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
+ setfield(cur_p,"next",save_link)
+ end
+ par.printed_node = current
+ end
+ write_nl("log","@")
+ if not current then
+ write_esc("par")
+ else
+ local id = getid(current)
+ if id == glue_code then
+ -- print nothing
+ elseif id == penalty_code then
+ write_esc("penalty")
+ elseif id == disc_code then
+ write_esc("discretionary")
+ elseif id == kern_code then
+ write_esc("kern")
+ elseif id == math_code then
+ write_esc("math")
+ else
+ write_esc("unknown")
+ end
+ end
+ local via, badness, demerits = 0, '*', '*'
+ if r.break_node then
+ via = r.break_node.serial or 0
+ end
+ if b <= infinite_badness then
+ badness = tonumber(b) -- format("%d", b)
+ end
+ if not artificial_demerits then
+ demerits = tonumber(d) -- format("%d", d)
+ end
+ write("log",format(" via @%d b=%s p=%s d=%s", via, badness, pi, demerits))
+end
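+
+-- A purely illustrative sample of what the tracing functions above write to the
+-- log when paragraph tracing is enabled (serials, badness and demerit values of
+-- course depend on the actual paragraph):
+--
+--   @firstpass
+--   @\discretionary via @0 b=10 p=50 d=2600
+--   @@1: line 1.2- t=2600 -> @0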
+
+-- reporting --
+
+statistics.register("alternative parbuilders", function()
+ if nofpars > 0 then
+ return format("%s paragraphs, %s lines (%s protruded, %s adjusted)", nofpars, noflines, nofprotrudedlines, nofadjustedlines)
+ end
+end)
+
+-- actually scaling kerns is not such a good idea and it will become
+-- configurable
+
+-- This is in no way a replacement for the built-in (fast) packer;
+-- it's just an alternative for special (testing) purposes.
+--
+-- We could use two hpacks: one to be used in the par builder
+-- and one to be used for other purposes. The one in the par
+-- builder is much simpler as it does not need the expansion
+-- code but only needs to register the effective expansion factor
+-- with the glyph.
+
+local function glyph_width_height_depth(curdir,pdir,p)
+ local wd = getfield(p,"width")
+ local ht = getfield(p,"height")
+ local dp = getfield(p,"depth")
+ if is_rotated[curdir] then
+ if is_parallel[curdir][pdir] then
+ local half = (ht + dp) / 2
+ return wd, half, half
+ else
+ local half = wd / 2
+ return ht + dp, half, half
+ end
+ elseif is_rotated[pdir] then
+ if is_parallel[curdir][pdir] then
+ local half = (ht + dp) / 2
+ return wd, half, half
+ else
+ return ht + dp, wd, 0 -- weird
+ end
+ else
+ if glyphdir_is_equal[curdir][pdir] then
+ return wd, ht, dp
+ elseif is_opposite[curdir][pdir] then
+ return wd, dp, ht
+ else -- can this happen?
+ return ht + dp, wd, 0
+ end
+ end
+end
+
+local function pack_width_height_depth(curdir,pdir,p)
+ local wd = getfield(p,"width")
+ local ht = getfield(p,"height")
+ local dp = getfield(p,"depth")
+ if is_rotated[curdir] then
+ if is_parallel[curdir][pdir] then
+ local half = (ht + dp) / 2
+ return wd, half, half
+ else -- can this happen?
+ local half = wd / 2
+ return ht + dp, half, half
+ end
+ else
+ if pardir_is_equal[curdir][pdir] then
+ return wd, ht, dp
+ elseif is_opposite[curdir][pdir] then
+ return wd, dp, ht
+ else -- weird dimensions, can this happen?
+ return ht + dp, wd, 0
+ end
+ end
+end
+
+-- local function xpack(head,width,method,direction,analysis)
+--
+-- -- inspect(analysis)
+--
+-- local expansion = method == "cal_expand_ratio"
+-- local natural = analysis.size
+-- local font_stretch = analysis.adjust_stretch
+-- local font_shrink = analysis.adjust_shrink
+-- local font_expand_ratio = 0
+-- local delta = width - natural
+--
+-- local hlist = new_node("hlist")
+--
+-- setfield(hlist,"list",head)
+-- setfield(hlist,"dir",direction or tex.textdir)
+-- setfield(hlist,"width",width)
+-- setfield(hlist,"height",height)
+-- setfield(hlist,"depth",depth)
+--
+-- if delta == 0 then
+--
+-- setfield(hlist,"glue_sign",0)
+-- setfield(hlist,"glue_order",0)
+-- setfield(hlist,"glue_set",0)
+--
+-- else
+--
+-- local order = analysis.filll ~= 0 and fillcodes.filll or
+-- analysis.fill ~= 0 and fillcodes.fill or
+-- analysis.fil ~= 0 and fillcodes.fil or
+-- analysis.fi ~= 0 and fillcodes.fi or 0
+--
+-- if delta > 0 then
+--
+-- if expansion and order == 0 and font_stretch > 0 then
+-- font_expand_ratio = (delta/font_stretch) * 1000
+-- else
+-- local stretch = analysis.stretch
+-- if stretch ~= 0 then
+-- setfield(hlist,"glue_sign",1) -- stretch
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",delta/stretch)
+-- else
+-- setfield(hlist,"glue_sign",0) -- nothing
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",0)
+-- end
+-- end
+--
+-- else
+--
+-- if expansion and order == 0 and font_shrink > 0 then
+-- font_expand_ratio = (delta/font_shrink) * 1000
+-- else
+-- local shrink = analysis.shrink
+-- if shrink ~= 0 then
+-- setfield(hlist,"glue_sign",2) -- shrink
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",-delta/stretch)
+-- else
+-- setfield(hlist,"glue_sign",0) -- nothing
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",0)
+-- end
+-- end
+--
+-- end
+--
+-- end
+--
+-- if not expansion or font_expand_ratio == 0 then
+-- -- nothing
+-- elseif font_expand_ratio > 0 then
+-- if font_expand_ratio > 1000 then
+-- font_expand_ratio = 1000
+-- end
+-- local current = head
+-- while current do
+-- local id = getid(current)
+-- if id == glyph_code then
+-- local stretch, shrink = char_stretch_shrink(current) -- get only one
+-- if stretch then
+-- if trace_expansion then
+-- setnodecolor(g,"hz:positive")
+-- end
+-- current.expansion_factor = font_expand_ratio * stretch
+-- end
+-- elseif id == kern_code then
+-- local kern = getfield(current,"kern")
+-- if kern ~= 0 and getsubtype(current) == kerning_code then
+-- setfield(current,"kern",font_expand_ratio * kern)
+-- end
+-- end
+-- current = getnext(current)
+-- end
+-- elseif font_expand_ratio < 0 then
+-- if font_expand_ratio < -1000 then
+-- font_expand_ratio = -1000
+-- end
+-- local current = head
+-- while current do
+-- local id = getid(current)
+-- if id == glyph_code then
+-- local stretch, shrink = char_stretch_shrink(current) -- get only one
+-- if shrink then
+-- if trace_expansion then
+-- setnodecolor(g,"hz:negative")
+-- end
+-- current.expansion_factor = font_expand_ratio * shrink
+-- end
+-- elseif id == kern_code then
+-- local kern = getfield(current,"kern")
+-- if kern ~= 0 and getsubtype(current) == kerning_code then
+-- setfield(current,"kern",font_expand_ratio * kern)
+-- end
+-- end
+-- current = getnext(current)
+-- end
+-- end
+-- return hlist, 0
+-- end
+
+local function hpack(head,width,method,direction,firstline,line) -- fast version when head = nil
+
+ -- we can pass the adjust_width and adjust_height so that we don't need to recalculate them but
+ -- with the glue mess it's less trivial as we lack detail .. challenge
+
+ local hlist = new_node("hlist")
+
+ setfield(hlist,"dir",direction)
+
+ if head == nil then
+ setfield(hlist,"width",width)
+ return hlist, 0
+ else
+ setfield(hlist,"list",head)
+ end
+
+ local cal_expand_ratio = method == "cal_expand_ratio" or method == "subst_ex_font"
+
+ direction = direction or tex.textdir
+
+ local line = line or 0 -- keep the line number passed in (if any) for the diagnostics
+
+ local height = 0
+ local depth = 0
+ local natural = 0
+ local font_stretch = 0
+ local font_shrink = 0
+ local font_expand_ratio = 0
+ local last_badness = 0
+ local expansion_stack = cal_expand_ratio and { } -- todo: optionally pass this
+ local expansion_index = 0
+ local total_stretch = { [0] = 0, 0, 0, 0, 0 }
+ local total_shrink = { [0] = 0, 0, 0, 0, 0 }
+
+ local hpack_dir = direction
+
+ local adjust_head = texlists.adjust_head
+ local pre_adjust_head = texlists.pre_adjust_head
+ local adjust_tail = adjust_head and slide_nodelist(adjust_head) -- todo: find_tail
+ local pre_adjust_tail = pre_adjust_head and slide_nodelist(pre_adjust_head) -- todo: find_tail
+
+ local dirstack = new_dir_stack(hpack_dir)
+
+ local checked_expansion = false
+
+ if cal_expand_ratio then
+ checked_expansion = { }
+ setmetatableindex(checked_expansion,check_expand_lines)
+ end
+
+ -- this one also needs to check the font, so in the end indeed we might end up with two variants
+
+ local fontexps, lastfont
+
+ local function process(current) -- called nested in disc replace
+
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ if cal_expand_ratio then
+ local currentfont = getfont(current)
+ if currentfont ~= lastfont then
+ fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
+ lastfont = currentfont
+ end
+ if fontexps then
+ local expansion = fontexps[getchar(current)]
+ if expansion then
+ font_stretch = font_stretch + expansion.glyphstretch
+ font_shrink = font_shrink + expansion.glyphshrink
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
+ end
+ end
+ end
+ -- use inline
+ local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ elseif id == kern_code then
+ local kern = getfield(current,"kern")
+ if kern == 0 then
+ -- no kern
+ elseif getsubtype(current) == kerning_code then -- check getfield(p,"kern")
+ if cal_expand_ratio then
+ local stretch, shrink = kern_stretch_shrink(current,kern)
+ font_stretch = font_stretch + stretch
+ font_shrink = font_shrink + shrink
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
+ end
+ natural = natural + kern
+ else
+ natural = natural + kern
+ end
+ elseif id == disc_code then
+ local subtype = getsubtype(current)
+ if subtype ~= second_disc_code then
+ -- todo : local stretch, shrink = char_stretch_shrink(s)
+ local replace = getfield(current,"replace")
+ if replace then
+ process(replace)
+ end
+ end
+ elseif id == glue_code then
+ local spec = getfield(current,"spec")
+ natural = natural + getfield(spec,"width")
+ local op = getfield(spec,"stretch_order")
+ local om = getfield(spec,"shrink_order")
+ total_stretch[op] = total_stretch[op] + getfield(spec,"stretch")
+ total_shrink [om] = total_shrink [om] + getfield(spec,"shrink")
+ if getsubtype(current) >= leaders_code then
+ local leader = getleader(current)
+ local ht = getfield(leader,"height")
+ local dp = getfield(leader,"depth")
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local sh = getfield(current,"shift")
+ local wd, ht, dp = pack_width_height_depth(hpack_dir,getfield(current,"dir") or hpack_dir,current) -- added: or pack_dir
+ local hs, ds = ht - sh, dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ elseif id == rule_code then
+ local wd = getfield(current,"width")
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ elseif id == math_code then
+ natural = natural + getfield(current,"surround")
+ elseif id == unset_code then
+ local wd = getfield(current,"width")
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ local sh = getfield(current,"shift")
+ local hs = ht - sh
+ local ds = dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ elseif id == ins_code or id == mark_code then
+ local prev = getprev(current)
+ local next = getnext(current)
+ if adjust_tail then -- todo
+ if next then
+ setfield(next,"prev",prev)
+ end
+ if prev then
+ setfield(prev,"next",next)
+ end
+ setfield(current,"prev",adjust_tail)
+ setfield(current,"next",nil)
+ adjust_setfield(tail,"next",current)
+ adjust_tail = current
+ else
+ adjust_head = current
+ adjust_tail = current
+ setfield(current,"prev",nil)
+ setfield(current,"next",nil)
+ end
+ elseif id == adjust_code then
+ local list = getlist(current)
+ if adjust_tail then
+ adjust_setfield(tail,"next",list)
+ else
+ adjust_head = list
+ end
+ adjust_tail = slide_nodelist(list) -- find_tail(list)
+ elseif id == whatsit_code then
+ local subtype = getsubtype(current)
+ if subtype == dir_code then
+ hpack_dir = checked_line_dir(dirstack,current) or hpack_dir
+ else
+ local get_dimensions = get_whatsit_dimensions[subtype]
+ if get_dimensions then
+ local wd, ht, dp = get_dimensions(current,hpack_dir)
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ end
+ end
+ elseif id == marginkern_code then
+ local width = getfield(current,"width")
+ if cal_expand_ratio then
+ -- is this ok?
+ local glyph = getfield(current,"glyph")
+ local char_pw = getsubtype(current) == leftmargin_code and left_pw or right_pw
+ font_stretch = font_stretch - width - char_pw(glyph)
+ font_shrink = font_shrink - width - char_pw(glyph)
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = glyph
+ end
+ natural = natural + width
+ end
+ current = getnext(current)
+ end
+
+ end
+
+ process(head)
+
+ if adjust_tail then
+ adjust_tail.next = nil -- todo
+ end
+ if pre_adjust_tail then
+ pre_adjust_tail.next = nil -- todo
+ end
+ if method == "additional" then
+ width = width + natural
+ end
+
+ setfield(hlist,"width",width)
+ setfield(hlist,"height",height)
+ setfield(hlist,"depth",depth)
+
+ local delta = width - natural
+ if delta == 0 then
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",0)
+ setfield(hlist,"glue_set",0)
+ elseif delta > 0 then
+ -- natural width smaller than requested width
+ local order = (total_stretch[4] ~= 0 and 4 or total_stretch[3] ~= 0 and 3) or
+ (total_stretch[2] ~= 0 and 2 or total_stretch[1] ~= 0 and 1) or 0
+ if cal_expand_ratio and order == 0 and font_stretch > 0 then -- check sign of font_stretch
+ font_expand_ratio = delta/font_stretch
+
+ if font_expand_ratio > 1 then
+ font_expand_ratio = 1
+ end
+
+ local fontexps, lastfont
+ for i=1,expansion_index do
+ local g = expansion_stack[i]
+ local e
+ if getid(g) == glyph_code then
+ local currentfont = getfont(g)
+ if currentfont ~= lastfont then
+ fontexps = expansions[currentfont]
+ lastfont = currentfont
+ end
+ local data = fontexps[getchar(g)]
+ if trace_expansion then
+ setnodecolor(g,"hz:positive")
+ end
+ e = font_expand_ratio * data.glyphstretch / 1000
+ else
+ local kern = getfield(g,"kern")
+ local stretch, shrink = kern_stretch_shrink(g,kern)
+ e = font_expand_ratio * stretch / 1000
+ end
+ setfield(g,"expansion_factor",e)
+ end
+ end
+ local tso = total_stretch[order]
+ if tso ~= 0 then
+ setfield(hlist,"glue_sign",1)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",delta/tso)
+ else
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",0)
+ end
+ if font_expand_ratio ~= 0 then
+ -- todo
+ elseif order == 0 then -- and getlist(hlist) then
+ last_badness = calculate_badness(delta,total_stretch[0])
+ if last_badness > tex.hbadness then
+ if last_badness > 100 then
+ diagnostics.underfull_hbox(hlist,line,last_badness)
+ else
+ diagnostics.loose_hbox(hlist,line,last_badness)
+ end
+ end
+ end
+ else
+ -- natural width larger than requested width
+ local order = total_shrink[4] ~= 0 and 4 or total_shrink[3] ~= 0 and 3
+ or total_shrink[2] ~= 0 and 2 or total_shrink[1] ~= 0 and 1 or 0
+ if cal_expand_ratio and order == 0 and font_shrink > 0 then -- check sign of font_shrink
+ font_expand_ratio = delta/font_shrink
+
+ if font_expand_ratio < -1 then
+ font_expand_ratio = -1
+ end
+
+ local fontexps, lastfont
+ for i=1,expansion_index do
+ local g = expansion_stack[i]
+ local e
+ if getid(g) == glyph_code then
+ local currentfont = getfont(g)
+ if currentfont ~= lastfont then
+ fontexps = expansions[currentfont]
+ lastfont = currentfont
+ end
+ local data = fontexps[getchar(g)]
+ if trace_expansion then
+ setnodecolor(g,"hz:negative")
+ end
+ e = font_expand_ratio * data.glyphshrink / 1000
+ else
+ local kern = getfield(g,"kern")
+ local stretch, shrink = kern_stretch_shrink(g,kern)
+ e = font_expand_ratio * shrink / 1000
+ end
+ setfield(g,"expansion_factor",e)
+ end
+ end
+ local tso = total_shrink[order]
+ if tso ~= 0 then
+ setfield(hlist,"glue_sign",2)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",-delta/tso)
+ else
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",0)
+ end
+ if font_expand_ratio ~= 0 then
+ -- todo
+ elseif tso < -delta and order == 0 then -- and getlist(hlist) then
+ last_badness = 1000000
+ setfield(hlist,"glue_set",1)
+ local fuzz = - delta - total_shrink[0]
+ local hfuzz = tex.hfuzz
+ if fuzz > hfuzz or tex.hbadness < 100 then
+ local overfullrule = tex.overfullrule
+ if fuzz > hfuzz and overfullrule > 0 then
+ -- weird, this is always called and no rule shows up
+ setfield(slide_nodelist(getlist(hlist)),"next",new_rule(overfullrule,nil,nil,getfield(hlist,"dir"))) -- todo: find_tail
+ end
+ diagnostics.overfull_hbox(hlist,line,-delta)
+ end
+ elseif order == 0 and getlist(hlist) then
+ last_badness = calculate_badness(-delta,total_shrink[0])
+ if last_badness > tex.hbadness then
+ diagnostics.bad_hbox(hlist,line,last_badness)
+ end
+ end
+ end
+ return hlist, last_badness
+end
+
+xpack_nodes = hpack -- comment this for old fashioned expansion (we need to fix float mess)
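+
+-- A minimal usage sketch (not taken from the builder itself); here 'somehead' is
+-- assumed to be a horizontal list and the target width a dimension in scaled
+-- points, while "exactly" stands for any method other than the expansion ones:
+--
+-- local box, badness = hpack(somehead,tex.hsize,"exactly",tex.textdir)
+-- local box, badness = hpack(somehead,0,"additional",tex.textdir) -- natural width plus 0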
+
+local function common_message(hlist,line,str)
+ write_nl("")
+ if status.output_active then -- unset
+ write(str," has occurred while \\output is active")
+ end
+ local fileline = status.linenumber
+ if line > 0 then
+ write(str," in paragraph at lines ",fileline,"--",fileline+line-1)
+ elseif line < 0 then
+ write(str," in alignment at lines ",fileline,"--",fileline-line-1)
+ else
+ write(str," detected at line ",fileline)
+ end
+ write_nl("")
+ diagnostics.short_display("log",getlist(hlist),false)
+ write_nl("")
+ -- diagnostics.start()
+ -- show_box(hlist.list)
+ -- diagnostics.stop()
+end
+
+function diagnostics.overfull_hbox(hlist,line,d)
+ common_message(hlist,line,format("Overfull \\hbox (%spt too wide)",number.toscaled(d)))
+end
+
+function diagnostics.bad_hbox(hlist,line,b)
+ common_message(hlist,line,format("Tight \\hbox (badness %i)",b))
+end
+
+function diagnostics.underfull_hbox(hlist,line,b)
+ common_message(hlist,line,format("Underfull \\hbox (badness %i)",b))
+end
+
+function diagnostics.loose_hbox(hlist,line,b)
+ common_message(hlist,line,format("Loose \\hbox (badness %i)",b))
+end
diff --git a/Master/texmf-dist/tex/context/base/node-met.lua b/Master/texmf-dist/tex/context/base/node-met.lua
new file mode 100644
index 00000000000..335ce2a9837
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/node-met.lua
@@ -0,0 +1,723 @@
+if not modules then modules = { } end modules ['node-nut'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is an experimental module. Don't use nuts for generic code, at least not till
+-- the regular code is proven stable. No support otherwise.
+
+-- luatex: todo: copylist should return h, t
+-- todo: see if using insert_before and insert_after makes sense here
+
+-- This file is a side effect of the \LUATEX\ speed optimization project of Luigi
+-- Scarso and me. As \CONTEXT\ spends over half its time in \LUA, we thought that
+-- using \LUAJIT\ could improve performance. We've published some of our experiences
+-- elsewhere, but to summarize: \LUAJITTEX\ benefits a lot from the faster virtual
+-- machine, but when jit is turned off we lose some again. We experimented with
+-- ffi (without messing up the \CONTEXT\ code too much) but there we also lost more
+-- than we gained (mostly due to lack of compatible userdata support: it's all or
+-- nothing). This made us decide to look into the \LUA||\TEX\ interfacing and by
+-- profiling and carefully looking at the (of course then still beta) source code we
+-- could come up with some improvements. The first showed up in 0.75 and we've more
+-- on the agenda for 0.80. Although some interfaces could be sped up significantly
+-- in practice we're only talking of 5||10\% on a \CONTEXT\ run and maybe more when
+-- complex and extensive node list manipulations happen (we're talking of hundreds
+-- of millions of cross boundary calls then for documents of hundreds of pages). One of the
+-- changes in the \CONTEXT\ code base is that we went from indexed access to nodes to
+-- function calls (in principle faster, were it not that the accessors need to do more
+-- checking, which makes them slower) and from there to optimizing these calls as well
+-- as providing fast variants for well defined situations. At first optimizations were
+-- put in a separate \type {node.fast} table although some improvements could be
+-- ported to the main node functions. Because we got the feeling that more gain was
+-- possible (especially when using more complex fonts and \MKIV\ functionality) we
+-- eventually abandoned this approach and dropped the \type {fast} table in favour of
+-- another hack. In the process we had done lots of profiling and testing so we knew
+-- where time was wasted.
+--
+-- As lots of testing and experimenting was part of this project, I could not have
+-- done without stacks of new \CD s and \DVD s. This time Porcupine Tree, No-Man
+-- and Archive came to the rescue.
+
+local type, select = type, select
+local setmetatableindex = table.setmetatableindex
+
+-- First we get the metatable of a node:
+
+local metatable = nil
+
+do
+ local glyph = node.new("glyph",0)
+ metatable = getmetatable(glyph)
+ node.free(glyph)
+end
+
+-- statistics.tracefunction(node, "node", "getfield","setfield")
+-- statistics.tracefunction(node.direct,"node.direct","getfield","setfield")
+
+-- We start with some helpers and provide all relevant basic functions in the
+-- node namespace as well.
+
+local gonuts = type(node.direct) == "table"
+-- local gonuts = false
+
+nodes = nodes or { }
+local nodes = nodes
+
+nodes.gonuts = gonuts
+
+local nodecodes = nodes.nodecodes
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+nodes.tostring = node.tostring or tostring
+nodes.copy = node.copy
+nodes.copy_list = node.copy_list
+nodes.delete = node.delete
+nodes.dimensions = node.dimensions
+nodes.end_of_math = node.end_of_math
+nodes.flush_list = node.flush_list
+nodes.flush_node = node.flush_node
+nodes.free = node.free
+nodes.insert_after = node.insert_after
+nodes.insert_before = node.insert_before
+nodes.hpack = node.hpack
+nodes.new = node.new
+nodes.tail = node.tail
+nodes.traverse = node.traverse
+nodes.traverse_id = node.traverse_id
+nodes.slide = node.slide
+nodes.vpack = node.vpack
+nodes.fields = node.fields
+nodes.is_node = node.is_node
+
+nodes.first_glyph = node.first_glyph
+nodes.first_character = node.first_character
+nodes.has_glyph = node.has_glyph or node.first_glyph
+
+nodes.current_attr = node.current_attr
+nodes.do_ligature_n = node.do_ligature_n
+nodes.has_field = node.has_field
+nodes.last_node = node.last_node
+nodes.usedlist = node.usedlist
+nodes.protrusion_skippable = node.protrusion_skippable
+nodes.write = node.write
+
+nodes.has_attribute = node.has_attribute
+nodes.set_attribute = node.set_attribute
+nodes.unset_attribute = node.unset_attribute
+
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyphs = node.unprotect_glyphs
+nodes.kerning = node.kerning
+nodes.ligaturing = node.ligaturing
+nodes.mlist_to_hlist = node.mlist_to_hlist
+
+if not gonuts or not node.getfield then
+ node.getfield = metatable.__index
+ node.setfield = metatable.__newindex
+end
+
+-- if gonuts then
+ nodes.tonode = function(n) return n end
+ nodes.tonut = function(n) return n end
+-- end
+
+local getfield = node.getfield
+local setfield = node.setfield
+
+local getattr = getfield
+local setattr = setfield
+
+local getnext = node.getnext or function(n) return getfield(n,"next") end
+local getprev = node.getprev or function(n) return getfield(n,"prev") end
+local getid = node.getid or function(n) return getfield(n,"id") end
+local getchar = node.getchar or function(n) return getfield(n,"char") end
+local getfont = node.getfont or function(n) return getfield(n,"font") end
+local getsubtype = node.getsubtype or function(n) return getfield(n,"subtype") end
+local getlist = node.getlist or function(n) return getfield(n,"list") end
+local getleader = node.getleader or function(n) return getfield(n,"leader") end
+
+nodes.getfield = getfield
+nodes.getattr = getattr
+
+nodes.setfield = setfield
+nodes.setattr = setattr
+
+nodes.getnext = getnext
+nodes.getprev = getprev
+nodes.getid = getid
+nodes.getchar = getchar
+nodes.getfont = getfont
+nodes.getsubtype = getsubtype
+nodes.getlist = getlist
+nodes.getleader = getleader
+
+nodes.getbox = node.getbox or tex.getbox
+nodes.setbox = node.setbox or tex.setbox
+nodes.getskip = node.getskip or tex.get
+
+local n_new_node = nodes.new
+local n_free_node = nodes.free
+local n_setfield = nodes.setfield
+local n_getfield = nodes.getfield
+local n_getnext = nodes.getnext
+local n_getprev = nodes.getprev
+local n_getid = nodes.getid
+local n_getlist = nodes.getlist
+local n_copy_node = nodes.copy
+local n_copy_list = nodes.copy_list
+local n_find_tail = nodes.tail
+local n_insert_after = nodes.insert_after
+local n_insert_before = nodes.insert_before
+local n_slide = nodes.slide
+
+local n_remove_node = node.remove -- not yet nodes.remove
+
+-- if t.id == glue_code then
+-- local s = t.spec
+-- if s and s.writable then
+-- free_node(s)
+-- end
+-- t.spec = nil
+-- end
+
+local function remove(head,current,free_too)
+ local t = current
+ head, current = n_remove_node(head,current)
+ if not t then
+ -- forget about it
+ elseif free_too then
+ n_free_node(t)
+ t = nil
+ else
+ n_setfield(t,"next",nil)
+ n_setfield(t,"prev",nil)
+ end
+ return head, current, t
+end
+
+nodes.remove = remove
+
+function nodes.delete(head,current)
+ return remove(head,current,true)
+end
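+
+-- usage sketch (illustrative only):
+--
+-- head, current      = nodes.delete(head,current) -- unlink current and free it
+-- head, current, old = nodes.remove(head,current) -- unlink current but keep the node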
+
+-- local h, c = nodes.replace(head,current,new)
+-- local c = nodes.replace(false,current,new)
+-- local c = nodes.replace(current,new)
+--
+-- todo: check for new.next and find tail
+
+function nodes.replace(head,current,new) -- no head returned if false
+ if not new then
+ head, current, new = false, head, current
+-- current, new = head, current
+ end
+ local prev = n_getprev(current)
+ local next = n_getnext(current)
+ if next then
+ n_setfield(new,"next",next)
+ n_setfield(next,"prev",new)
+ end
+ if prev then
+ n_setfield(new,"prev",prev)
+ n_setfield(prev,"next",new)
+ end
+ if head then
+ if head == current then
+ head = new
+ end
+ n_free_node(current)
+ return head, new
+ else
+ n_free_node(current)
+ return new
+ end
+end
+
+local function count(stack,flat)
+ local n = 0
+ while stack do
+ local id = n_getid(stack)
+ if not flat and id == hlist_code or id == vlist_code then
+ local list = n_getlist(stack)
+ if list then
+ n = n + 1 + count(list) -- self counts too
+ else
+ n = n + 1
+ end
+ else
+ n = n + 1
+ end
+ stack = n_getnext(stack)
+ end
+ return n
+end
+
+nodes.count = count
+
+function nodes.append(head,current,...)
+ for i=1,select("#",...) do
+ head, current = n_insert_after(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nodes.prepend(head,current,...)
+ for i=1,select("#",...) do
+ head, current = n_insert_before(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nodes.linked(...)
+ local head, last
+ for i=1,select("#",...) do
+ local next = select(i,...)
+ if next then
+ if head then
+ n_setfield(last,"next",next)
+ n_setfield(next,"prev",last)
+ else
+ head = next
+ end
+ last = n_find_tail(next) -- we could skip the last one
+ end
+ end
+ return head
+end
+
+function nodes.concat(list) -- consider tail instead of slide
+ local head, tail
+ for i=1,#list do
+ local li = list[i]
+ if li then
+ if head then
+ n_setfield(tail,"next",li)
+ n_setfield(li,"prev",tail)
+ else
+ head = li
+ end
+ tail = n_slide(li)
+ end
+ end
+ return head, tail
+end
+
+--[[
+At some point we ran into a problem where the glue specification
+of the zeropoint dimension was overwritten when adapting a glue spec
+node. This is a side effect of glue specs being shared. After a
+couple of hours of tracing and debugging, Taco and I came to the
+conclusion that it made no sense to complicate the spec allocator
+and settled on a writable flag. This all is a side effect of the
+fact that some glues use reserved memory slots (with the zeropoint
+glue being a notable one). So, next we wrap this in a function
+and hide it from the user. And yes, LuaTeX now gives a warning as
+well.
+]]--
+
+function nodes.writable_spec(n) -- not pool
+ local spec = n_getfield(n,"spec")
+ if not spec then
+ spec = n_copy_node(glue_spec)
+ n_setfield(n,"spec",spec)
+ elseif not n_getfield(spec,"writable") then
+ spec = n_copy_node(spec)
+ n_setfield(n,"spec",spec)
+ end
+ return spec
+end
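+
+-- Usage sketch (not part of the module, field names as in a regular glue
+-- spec): before touching a glue's dimensions one first asks for a writable
+-- spec, so that a shared (reserved) spec like the zeropoint one is never
+-- clobbered.
+--
+-- local function widen(glue,amount)
+--     local spec = nodes.writable_spec(glue)
+--     n_setfield(spec,"width",n_getfield(spec,"width") + amount)
+--     return glue
+-- end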
+
+function nodes.copy_spec(old,free) -- also frees
+ if not old then
+ return n_new_node("glue_spec")
+ else
+ local new = n_copy_node(old)
+ if free and old.writable then
+            n_free_node(old)
+ end
+ return new
+ end
+end
+
+function nodes.free_spec(old)
+ if not old then
+ -- skip
+ elseif old.writable then
+        n_free_node(old)
+ else
+ -- skip
+ end
+end
+
+if gonuts then
+
+ function nodes.reference(n)
+ return n and tonut(n) or ""
+ end
+
+else
+
+ local left, space = lpeg.P("<"), lpeg.P(" ")
+
+ local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0)
+
+ function nodes.reference(n)
+ return n and lpegmatch(reference,tostring(n)) or ""
+ end
+
+end
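+
+-- For the record, and as an illustration only: in plain node mode the string
+-- rendering of a node looks roughly like "<node    nil <    172 >    nil : glyph 0>"
+-- and the pattern above picks out the middle number, which serves as a cheap
+-- reference for tracing.
+--
+-- local r = nodes.reference(somenode) -- e.g. "172" (or the nut number in nuts mode)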
+
+-- Here starts an experiment with metatables. Of course this only works with nodes
+-- wrapped in userdata with a metatable.
+--
+-- Nodes are kind of special in the sense that you need to keep an eye on creation
+-- and destruction. This is quite natural if you consider that changing the content
+-- of a node would also change any copy (or alias). As there are too many pitfalls
+-- we don't have this kind of support built in \LUATEX, which means that macro
+-- packages are free to provide their own. One can even use local variants.
+--
+-- n1 .. n2 : append nodes, no copies
+-- n1 * 5 : append 4 copies of nodes
+-- 5 + n1 : strip first 5 nodes
+-- n1 - 5 : strip last 5 nodes
+-- n1 + n2 : inject n2 after first of n1
+-- n1 - n2 : inject n2 before last of n1
+-- n1^2 : two copies of nodes (keep original)
+-- - n1 : reverse nodes
+-- n1/f : apply function to nodes
+
+-- local s = nodes.typesetters.tonodes
+--
+-- local function w(lst)
+-- context.dontleavehmode()
+-- context(lst)
+-- context.par()
+-- end
+--
+-- local n1 = s("a")
+-- local n2 = s("b")
+-- local n3 = s("c")
+-- local n4 = s("d")
+-- local n5 = s("e")
+-- local n6 = s("f")
+-- local n7 = s("g")
+--
+-- local n0 = n1 .. (n2 * 10).. n3 .. (5 * n4) .. n5 .. ( 5 * n6 ) .. n7 / function(n) n.char = string.byte("!") return n end
+--
+-- w(#n0)
+--
+-- w(n0)
+--
+-- local n1 = s("a") * 10
+-- local n2 = s("b") * 10
+--
+-- local n0 = ((5 + n1) .. (n2 - 5) )
+-- local n0 = - n0
+--
+-- local n0 = nil .. n0^3 .. nil
+--
+-- w(n0)
+--
+-- w ( s("a") + s("b") ) w ( s("a") + 4*s("b") ) w ( 4*s("a") + s("b") ) w ( 4*s("a") + 4*s("b") )
+-- w ( s("a") - s("b") ) w ( s("a") - 4*s("b") ) w ( 4*s("a") - s("b") ) w ( 4*s("a") - 4*s("b") )
+
+local n_remove_node = nodes.remove
+
+metatable.__concat = function(n1,n2) -- todo: accept nut on one end
+ if not n1 then
+ return n2
+ elseif not n2 then
+ return n1
+ elseif n1 == n2 then
+ -- or abort
+ return n2 -- or n2 * 2
+ else
+ local tail = n_find_tail(n1)
+ n_setfield(tail,"next",n2)
+ n_setfield(n2,"prev",tail)
+ return n1
+ end
+end
+
+metatable.__mul = function(n,multiplier)
+ if type(multiplier) ~= "number" then
+ n, multiplier = multiplier, n
+ end
+ if multiplier <= 1 then
+ return n
+ elseif n_getnext(n) then
+ local head
+ for i=2,multiplier do
+ local h = n_copy_list(n)
+ if head then
+ local t = n_find_tail(h)
+ n_setfield(t,"next",head)
+ n_setfield(head,"prev",t)
+ end
+ head = h
+ end
+ local t = n_find_tail(n)
+ n_setfield(t,"next",head)
+ n_setfield(head,"prev",t)
+ else
+ local head
+ for i=2,multiplier do
+ local c = n_copy_node(n)
+ if head then
+ n_setfield(c,"next",head)
+ n_setfield(head,"prev",c)
+ end
+ head = c
+ end
+ n_setfield(n,"next",head)
+ n_setfield(head,"prev",n)
+ end
+ return n
+end
+
+metatable.__sub = function(first,second)
+ if type(second) == "number" then
+ local tail = n_find_tail(first)
+ for i=1,second do
+ local prev = n_getfield(tail,"prev")
+ n_free_node(tail) -- can become flushlist/flushnode
+ if prev then
+ tail = prev
+ else
+ return nil
+ end
+ end
+ if tail then
+ n_setfield(tail,"next",nil)
+ return first
+ else
+ return nil
+ end
+ else
+ -- aaaaa - bbb => aaaabbba
+ local firsttail = n_find_tail(first)
+ local prev = n_getprev(firsttail)
+ if prev then
+ local secondtail = n_find_tail(second)
+ n_setfield(secondtail,"next",firsttail)
+                n_setfield(firsttail,"prev",secondtail)
+ n_setfield(prev,"next",second)
+ n_setfield(second,"prev",prev)
+ return first
+ else
+ local secondtail = n_find_tail(second)
+ n_setfield(secondtail,"next",first)
+ n_setfield(first,"prev",ltail)
+ return second
+ end
+ end
+end
+
+metatable.__add = function(first,second)
+ if type(first) == "number" then
+ local head = second
+ for i=1,first do
+ local second = n_getnext(head)
+ n_free_node(head) -- can become flushlist/flushnode
+ if second then
+ head = second
+ else
+ return nil
+ end
+ end
+ if head then
+ n_setfield(head,"prev",nil)
+ return head
+ else
+ return nil
+ end
+ else
+ -- aaaaa + bbb => abbbaaaa
+ local next = n_getnext(first)
+ if next then
+ local secondtail = n_find_tail(second)
+ n_setfield(first,"next",second)
+ n_setfield(second,"prev",first)
+ n_setfield(secondtail,"next",next)
+ n_setfield(next,"prev",secondtail)
+ else
+ n_setfield(first,"next",second)
+ n_setfield(second,"prev",first)
+ end
+ return first
+ end
+end
+
+metatable.__len = function(current)
+ local length = 0
+ while current do
+ current = n_getnext(current)
+ length = length + 1
+ end
+ return length
+end
+
+metatable.__div = function(list,action)
+ return action(list) or list -- always a value
+end
+
+metatable.__pow = function(n,multiplier)
+ local tail = n
+ local head = nil
+ if getnext(n) then
+ if multiplier == 1 then
+ head = n_copy_list(n)
+ else
+ for i=1,multiplier do
+ local h = n_copy_list(n)
+ if head then
+ local t = n_find_tail(h)
+ n_setfield(t,"next",head)
+ n_setfield(head,"prev",t)
+ end
+ head = h
+ end
+ end
+ else
+ if multiplier == 1 then
+ head = n_copy_node(n)
+ else
+ for i=2,multiplier do
+ local c = n_copy_node(n)
+ if head then
+ n_setfield(head,"next",c)
+ n_setfield(c,"prev",head)
+ end
+ head = c
+ end
+ end
+ end
+ -- todo: tracing
+ return head
+end
+
+metatable.__unm = function(head)
+ local last = head
+ local first = head
+ local current = n_getnext(head)
+ while current do
+ local next = n_getnext(current)
+ n_setfield(first,"prev",current)
+ n_setfield(current,"next",first)
+ first = current
+ current = next
+ end
+ n_setfield(first,"prev",nil)
+ n_setfield(last,"next",nil)
+ return first
+end
+
+-- see node-nut.lua for more info on going nuts
+
+if not gonuts then
+
+ local nuts = { }
+ nodes.nuts = nuts
+
+ local function dummy(f) return f end
+
+ nodes.vianuts = dummy
+ nodes.vianodes = dummy
+
+ for k, v in next, nodes do
+ if type(v) == "function" then
+ nuts[k] = v
+ end
+ end
+
+end
+
+-- also handy
+
+local tonode = nodes.tonode
+local whatsit_code = nodecodes.whatsit
+local getfields = node.fields
+local sort = table.sort
+local whatsitkeys = { }
+local keys = { whatsit = whatsitkeys }
+local messyhack = table.tohash { -- temporary solution
+ nodecodes.attributelist,
+ nodecodes.attribute,
+ nodecodes.gluespec,
+ nodecodes.action,
+}
+
+table.setmetatableindex(keys,function(t,k)
+ local v = getfields(k)
+ if messyhack[k] then
+ for i=1,#v do
+ if v[i] == "subtype" then
+            table.remove(v,i)
+ break
+ end
+ end
+ end
+ if v[ 0] then v[#v+1] = "next" v[ 0] = nil end
+ if v[-1] then v[#v+1] = "prev" v[-1] = nil end
+ sort(v)
+ t[k] = v
+ return v
+end)
+
+table.setmetatableindex(whatsitkeys,function(t,k)
+ local v = getfields(whatsit_code,k)
+ if v[ 0] then v[#v+1] = "next" v[ 0] = nil end
+ if v[-1] then v[#v+1] = "prev" v[-1] = nil end
+ sort(v)
+ t[k] = v
+ return v
+end)
+
+local function nodefields(n)
+ n = tonode(n)
+ local id = n.id
+ if id == whatsit_code then
+ return whatsitkeys[n.subtype]
+ else
+ return keys[id]
+ end
+end
+
+nodes.keys = keys -- [id][subtype]
+nodes.fields = nodefields -- (n)
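+
+-- Illustration only: nodes.fields(n) gives the sorted field names that are
+-- valid for that particular node (whatsits get their subtype specific list),
+-- which is mostly useful in inspectors and tracing code.
+--
+-- local fields = nodes.fields(somenode) -- somenode: any node at hand
+-- print(table.concat(fields,", "))      -- e.g. "attr, char, font, ..."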
+
+-- one issue solved in flush_node:
+--
+-- case glue_spec_node:
+-- if (glue_ref_count(p)!=null) {
+-- decr(glue_ref_count(p));
+-- return ;
+-- /*
+-- } else if (! valid_node(p)) {
+-- return ;
+-- */
+-- /*
+-- } else {
+-- free_node(p, get_node_size(type(p), subtype(p)));
+-- return ;
+-- */
+-- }
+-- break ;
+--
+-- or:
+--
+-- case glue_spec_node:
+-- if (glue_ref_count(p)!=null) {
+-- decr(glue_ref_count(p));
+-- return ;
+-- } else if (valid_node(p)) {
+-- free_node(p, get_node_size(type(p), subtype(p)));
+-- return ;
+-- } else {
+-- break ;
+-- }
diff --git a/Master/texmf-dist/tex/context/base/node-mig.lua b/Master/texmf-dist/tex/context/base/node-mig.lua
index 9fc35a04820..41f95be4590 100644
--- a/Master/texmf-dist/tex/context/base/node-mig.lua
+++ b/Master/texmf-dist/tex/context/base/node-mig.lua
@@ -6,15 +6,32 @@ if not modules then modules = { } end modules ['node-mig'] = {
license = "see context related readme files"
}
+-- todo: insert_after
+
local format = string.format
-local attributes, nodes, node = attributes, nodes, node
+local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-local remove_nodes = nodes.remove
+local report_nodes = logs.reporter("nodes","migrations")
-local nodecodes = nodes.nodecodes
+local attributes = attributes
+local nodes = nodes
local tasks = nodes.tasks
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local remove_node = nuts.remove
+
+local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local insert_code = nodecodes.ins
@@ -22,10 +39,6 @@ local mark_code = nodecodes.mark
local a_migrated = attributes.private("migrated")
-local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-
-local report_nodes = logs.reporter("nodes","migrations")
-
local migrate_inserts, migrate_marks, inserts_too
local t_inserts, t_marks, t_sweeps = 0, 0, 0
@@ -33,32 +46,42 @@ local t_inserts, t_marks, t_sweeps = 0, 0, 0
local function locate(head,first,last,ni,nm)
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == vlist_code or id == hlist_code then
- current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm)
- current = current.next
+ local list = getlist(current)
+ if list then
+ list, first, last, ni, nm = locate(list,first,last,ni,nm)
+ setfield(current,"list",list)
+ end
+ current = getnext(current)
elseif migrate_inserts and id == insert_code then
local insert
- head, current, insert = remove_nodes(head,current)
- insert.next = nil
+ head, current, insert = remove_node(head,current)
+ setfield(insert,"next",nil)
if first then
- insert.prev, last.next = last, insert
+ setfield(insert,"prev",last)
+ setfield(last,"next",insert)
else
- insert.prev, first = nil, insert
+ setfield(insert,"prev",nil)
+ first = insert
end
- last, ni = insert, ni + 1
+ last = insert
+ ni = ni + 1
elseif migrate_marks and id == mark_code then
local mark
- head, current, mark = remove_nodes(head,current)
- mark.next = nil
+ head, current, mark = remove_node(head,current)
+ setfield(mark,"next",nil)
if first then
- mark.prev, last.next = last, mark
+ setfield(mark,"prev",last)
+ setfield(last,"next",mark)
else
- mark.prev, first = nil, mark
+ setfield(mark,"prev",nil)
+ first = mark
end
- last, nm = mark, nm + 1
+ last = mark
+ nm = nm + 1
else
- current= current.next
+ current = getnext(current)
end
end
return head, first, last, ni, nm
@@ -70,39 +93,43 @@ function nodes.handlers.migrate(head,where)
if trace_migrations then
report_nodes("migration sweep %a",where)
end
- local current = head
+ local current = tonut(head)
while current do
- local id = current.id
+ local id = getid(current)
-- inserts_too is a temp hack, we should only do them when it concerns
-- newly placed (flushed) inserts
- if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then
- current[a_migrated] = 1
+ if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not getattr(current,a_migrated) then
+ setattr(current,a_migrated,1)
t_sweeps = t_sweeps + 1
- local h = current.list
+ local h = getlist(current)
local first, last, ni, nm
while h do
- local id = h.id
+ local id = getid(h)
if id == vlist_code or id == hlist_code then
h, first, last, ni, nm = locate(h,first,last,0,0)
end
- h = h.next
+ h = getnext(h)
end
if first then
- t_inserts, t_marks = t_inserts + ni, t_marks + nm
+ t_inserts = t_inserts + ni
+ t_marks = t_marks + nm
if trace_migrations and (ni > 0 or nm > 0) then
report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
t_sweeps,nodecodes[id],ni,nm,where)
end
- -- inserts after head
- local n = current.next
+ -- inserts after head, use insert_after
+ local n = getnext(current)
if n then
- last.next, n.prev = n, last
+ setfield(last,"next",n)
+ setfield(n,"prev",last)
end
- current.next, first.prev = first, current
- done, current = true, last
+ setfield(current,"next",first)
+ setfield(first,"prev",current)
+ done = true
+ current = last
end
end
- current = current.next
+ current = getnext(current)
end
return head, done
end
diff --git a/Master/texmf-dist/tex/context/base/node-nut.lua b/Master/texmf-dist/tex/context/base/node-nut.lua
new file mode 100644
index 00000000000..2b4e9968c6e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/node-nut.lua
@@ -0,0 +1,702 @@
+if not modules then modules = { } end modules ['node-met'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Here starts some more experimental code that Luigi and I use in a next stage of
+-- exploring and testing potential speedups in the engines. This code is not meant
+-- for users and can change (or be removed) at any moment. During the experiments I'll
+-- do my best to keep the code as fast as possible by using two codebases. See
+-- about-fast.pdf for some more info about impacts. Although key based access has
+-- more charm, function based is somewhat faster and has more potential for future
+-- speedups.
+
+-- This next iteration is flagged direct because we avoid user data, which has a price
+-- in allocation and metatable tagging. Although in this stage we pass numbers around,
+-- future versions might use light user data, so never depend on what direct functions
+-- return. Using the direct approach has some speed advantages but you lose the key
+-- based access. The speed gain is only measurable in cases with lots of access. For
+-- instance, when typesetting Arabic with advanced fonts we're talking of many millions
+-- of function calls and there we can get a 30\% or more speedup. On average complex
+-- \CONTEXT\ runs the gain can be 10\% to 15\%. Because mixing the two models
+-- (here we call them nodes and nuts) is not possible, you need to cast either way, which
+-- has a penalty. Also, error messages in nuts mode are less clear and \LUATEX\ will
+-- often simply abort when you make mistakes or mix the models. So, development (at least
+-- in \CONTEXT) can be done in node mode and not in nuts mode. Only robust code will
+-- be turned into nuts afterwards, and quite likely not all code. The official \LUATEX\ api
+-- to nodes is userdata!
+--
+-- Listening to 'lunatic soul' at the same time helped wrap my mind around the mixed
+-- usage of both models. Just for the record: the potential of the direct approach only
+-- became clear after experimenting for weeks and partly adapting code. It is one of those
+-- (sub)projects where you afterwards wonder if it was worth the trouble, but users who
+-- rely on lots of complex functionality and font support will probably notice the speedup.
+--
+-- luatex luajittex
+-- ------------- ----- -------------------- ---------------------------------
+-- name pages old new pct old new pct
+-- ------------- ----- -------------------- ---------------------------------
+-- fonts-mkiv 166 9.3 7.7/7.4 17.2 7.4 (37.5) 5.9/5.7 (55.6) 20.3
+-- about 60 3.3 2.7/2.6 20.4 2.5 (39.5) 2.1 (57.0) 23.4
+-- arabic-001 61 25.3 15.8 18.2 15.3 (46.7) 6.8 (54.7) 16.0
+-- torture-001 300 21.4 11.4 24.2 13.9 (35.0) 6.3 (44.7) 22.2
+--
+-- so:
+--
+-- - we run around 20% faster on documents of average complexity and gain more when
+-- dealing with scripts like arabic and such
+-- - luajittex benefits a bit more so a luajittex job can (in principle) now be much
+-- faster
+-- - if we reason backwards, and take luajittex as norm we get 1:2:3 on some jobs for
+-- luajittex direct:luatex direct:luatex normal i.e. we can be 3 times faster
+-- - keep in mind that these are tex/lua runs so the real gain at the lua end is much
+-- larger
+--
+-- Because we can fake direct mode a little bit by using the fast getfield and setfield
+-- at the cost of wrapped getid and the like, we are still running quite ok. As we could gain
+-- some 5% with fast mode, we can sacrifice some on wrappers when we use a few fast core
+-- functions. This means that simulated direct mode runs font-mkiv in 9.1 seconds (we could
+-- get down to 8.7 seconds in fast mode) and that we can migrate slowly to direct mode.
+--
+-- The following measurements are from 2013-07-05 after adapting some 47 files to nuts. Keep
+-- in mind that the old binary can fake a fast getfield and setfield but that the other
+-- getters are wrapped functions. The more we have, the slower it gets.
+--
+-- fonts about arabic
+-- old mingw, indexed plus some functions : 8.9 3.2 20.3
+-- old mingw, fake functions : 9.9 3.5 27.4
+-- new mingw, node functions : 9.0 3.1 20.8
+-- new mingw, indexed plus some functions : 8.6 3.1 19.6
+-- new mingw, direct functions : 7.5 2.6 14.4
+--
+-- \starttext \dorecurse{1000}{test\page} \stoptext :
+--
+-- luatex 560 pps
+-- luajittex 600 pps
+--
+-- \setupbodyfont[pagella]
+--
+-- \edef\zapf{\cldcontext{context(io.loaddata(resolvers.findfile("zapf.tex")))}}
+--
+-- \starttext \dorecurse{1000}{\zapf\par} \stoptext
+--
+-- luatex 3.9 sec / 54 pps
+-- luajittex 2.3 sec / 93 pps
+
+local nodes = nodes
+local gonuts = nodes.gonuts
+local direct = node.direct
+
+if type(direct) ~= "table" then
+ return
+elseif gonuts then
+ statistics.register("running in nuts mode", function() return "yes" end)
+else
+ statistics.register("running in nuts mode", function() return "no" end)
+ return
+end
+
+local texget = tex.get
+
+local nodecodes = nodes.nodecodes
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local nuts = nodes.nuts or { }
+nodes.nuts = nuts
+
+nodes.is_node = direct.is_node or function() return true end
+nodes.is_direct = direct.is_direct or function() return false end
+nodes.is_nut = nodes.is_direct
+
+-- casters
+
+local tonode = direct.tonode or function(n) return n end
+local tonut = direct.todirect or function(n) return n end
+
+nuts.tonode = tonode
+nuts.tonut = tonut
+
+nodes.tonode = tonode
+nodes.tonut = tonut
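+
+-- A minimal sketch of the casting mentioned above (illustration only): code in
+-- node mode deals with userdata, code in nuts mode with the direct
+-- representation, and tonut/tonode are the only sanctioned ways to cross over.
+--
+-- local function process(head) -- head comes in as a userdata node
+--     local h = tonut(head)    -- cast once on entry
+--     -- ... fast nuts.* getters and setters on h ...
+--     return tonode(h)         -- cast once on exit
+-- end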
+
+-- getters
+
+nuts.getfield = direct.getfield
+nuts.getnext = direct.getnext
+nuts.getprev = direct.getprev
+nuts.getid = direct.getid
+nuts.getattr = direct.has_attribute or direct.getfield
+nuts.getchar = direct.getchar
+nuts.getfont = direct.getfont
+nuts.getsubtype = direct.getsubtype
+nuts.getlist = direct.getlist -- only hlist and vlist !
+nuts.getleader = direct.getleader
+
+-- local dgf = direct.getfield function nuts.getlist(n) return dgf(n,"list") end
+
+-- setters
+
+nuts.setfield = direct.setfield
+nuts.setattr = direct.set_attribute or direct.setfield
+
+nuts.getbox = direct.getbox
+nuts.setbox = direct.setbox
+nuts.getskip = direct.getskip or function(s) return tonut(texget(s)) end
+
+-- helpers
+
+nuts.tostring = direct.tostring
+nuts.copy = direct.copy
+nuts.copy_list = direct.copy_list
+nuts.delete = direct.delete
+nuts.dimensions = direct.dimensions
+nuts.end_of_math = direct.end_of_math
+nuts.flush_list = direct.flush_list
+nuts.flush_node = direct.flush_node
+nuts.free = direct.free
+nuts.insert_after = direct.insert_after
+nuts.insert_before = direct.insert_before
+nuts.hpack = direct.hpack
+nuts.new = direct.new
+nuts.tail = direct.tail
+nuts.traverse = direct.traverse
+nuts.traverse_id = direct.traverse_id
+nuts.slide = direct.slide
+nuts.writable_spec = direct.writable_spec
+nuts.vpack = direct.vpack
+nuts.is_node = direct.is_node
+nuts.is_direct = direct.is_direct
+nuts.is_nut = direct.is_direct
+nuts.first_glyph = direct.first_glyph
+nuts.first_character = direct.first_character
+nuts.has_glyph = direct.has_glyph or direct.first_glyph
+
+nuts.current_attr = direct.current_attr
+nuts.do_ligature_n = direct.do_ligature_n
+nuts.has_field = direct.has_field
+nuts.last_node = direct.last_node
+nuts.usedlist = direct.usedlist
+nuts.protrusion_skippable = direct.protrusion_skippable
+nuts.write = direct.write
+
+nuts.has_attribute = direct.has_attribute
+nuts.set_attribute = direct.set_attribute
+nuts.unset_attribute = direct.unset_attribute
+
+nuts.protect_glyphs = direct.protect_glyphs
+nuts.unprotect_glyphs = direct.unprotect_glyphs
+
+-- placeholders
+
+if not direct.kerning then
+
+ local n_kerning = node.kerning
+
+ function nuts.kerning(head)
+ return tonode(n_kerning(tonut(head)))
+ end
+
+end
+
+if not direct.ligaturing then
+
+ local n_ligaturing = node.ligaturing
+
+ function nuts.ligaturing(head)
+ return tonode(n_ligaturing(tonut(head)))
+ end
+
+end
+
+if not direct.mlist_to_hlist then
+
+ local n_mlist_to_hlist = node.mlist_to_hlist
+
+ function nuts.mlist_to_hlist(head)
+ return tonode(n_mlist_to_hlist(tonut(head)))
+ end
+
+end
+
+--
+
+local d_remove_node = direct.remove
+local d_free_node = direct.free
+local d_getfield = direct.getfield
+local d_setfield = direct.setfield
+local d_getnext = direct.getnext
+local d_getprev = direct.getprev
+local d_getid = direct.getid
+local d_getlist = direct.getlist
+local d_find_tail = direct.tail
+local d_insert_after = direct.insert_after
+local d_insert_before = direct.insert_before
+local d_slide = direct.slide
+local d_copy_node = direct.copy
+local d_traverse = direct.traverse
+
+local function remove(head,current,free_too)
+ local t = current
+ head, current = d_remove_node(head,current)
+ if not t then
+ -- forget about it
+ elseif free_too then
+ d_free_node(t)
+ t = nil
+ else
+ d_setfield(t,"next",nil) -- not that much needed (slows down unless we check the source on this)
+ d_setfield(t,"prev",nil) -- not that much needed (slows down unless we check the source on this)
+ end
+ return head, current, t
+end
+
+-- bad: we can have prev's being glue_spec
+
+-- local function remove(head,current,free_too) -- d_remove_node does a slide which can fail
+-- local prev = d_getprev(current) -- weird
+-- local next = d_getnext(current)
+-- if next then
+-- -- print("!!!!!!!! prev is gluespec",
+-- -- nodes.nodecodes[d_getid(current)],
+-- -- nodes.nodecodes[d_getid(next)],
+-- -- nodes.nodecodes[d_getid(prev)])
+-- d_setfield(prev,"next",next)
+-- d_setfield(next,"prev",prev)
+-- else
+-- d_setfield(prev,"next",nil)
+-- end
+-- if free_too then
+-- d_free_node(current)
+-- current = nil
+-- else
+-- d_setfield(current,"next",nil) -- use this fact !
+-- d_setfield(current,"prev",nil) -- use this fact !
+-- end
+-- if head == current then
+-- return next, next, current
+-- else
+-- return head, next, current
+-- end
+-- end
+
+nuts.remove = remove
+
+function nuts.delete(head,current)
+ return remove(head,current,true)
+end
+
+function nuts.replace(head,current,new) -- no head returned if false
+ if not new then
+ head, current, new = false, head, current
+ end
+ local prev = d_getprev(current)
+ local next = d_getnext(current)
+ if next then
+ d_setfield(new,"next",next)
+ d_setfield(next,"prev",new)
+ end
+ if prev then
+ d_setfield(new,"prev",prev)
+ d_setfield(prev,"next",new)
+ end
+ if head then
+ if head == current then
+ head = new
+ end
+ d_free_node(current)
+ return head, new
+ else
+ d_free_node(current)
+ return new
+ end
+end
+
+local function count(stack,flat)
+ local n = 0
+ while stack do
+ local id = d_getid(stack)
+ if not flat and id == hlist_code or id == vlist_code then
+ local list = d_getlist(stack)
+ if list then
+ n = n + 1 + count(list) -- self counts too
+ else
+ n = n + 1
+ end
+ else
+ n = n + 1
+ end
+ stack = d_getnext(stack)
+ end
+ return n
+end
+
+nuts.count = count
+
+function nuts.append(head,current,...)
+ for i=1,select("#",...) do
+ head, current = d_insert_after(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nuts.prepend(head,current,...)
+ for i=1,select("#",...) do
+ head, current = d_insert_before(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nuts.linked(...)
+ local head, last
+ for i=1,select("#",...) do
+ local next = select(i,...)
+ if next then
+ if head then
+ d_setfield(last,"next",next)
+ d_setfield(next,"prev",last)
+ else
+ head = next
+ end
+ last = d_find_tail(next) -- we could skip the last one
+ end
+ end
+ return head
+end
+
+function nuts.concat(list) -- consider tail instead of slide
+ local head, tail
+ for i=1,#list do
+ local li = list[i]
+ if li then
+ if head then
+ d_setfield(tail,"next",li)
+ d_setfield(li,"prev",tail)
+ else
+ head = li
+ end
+ tail = d_slide(li)
+ end
+ end
+ return head, tail
+end
+
+function nuts.writable_spec(n) -- not pool
+ local spec = d_getfield(n,"spec")
+ if not spec then
+ spec = d_copy_node(glue_spec)
+ d_setfield(n,"spec",spec)
+ elseif not d_getfield(spec,"writable") then
+ spec = d_copy_node(spec)
+ d_setfield(n,"spec",spec)
+ end
+ return spec
+end
+
+function nuts.reference(n)
+ return n or ""
+end
+
+-- quick and dirty tracing of nuts
+
+-- for k, v in next, nuts do
+-- if string.find(k,"box") then
+-- nuts[k] = function(...) print(k,...) return v(...) end
+-- end
+-- end
+
+function nodes.vianuts (f) return function(n,...) return tonode(f(tonut (n),...)) end end
+function nodes.vianodes(f) return function(n,...) return tonut (f(tonode(n),...)) end end
+
+nuts.vianuts = nodes.vianuts
+nuts.vianodes = nodes.vianodes
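+
+-- For instance (sketch only, the helper name is made up): a function written
+-- against the nuts getters can be exposed to node based callers by wrapping it
+-- once, and the other way around with vianodes.
+--
+-- local function cleanup_nut(head) -- expects and returns a nut
+--     -- ... nuts.* manipulations ...
+--     return head
+-- end
+--
+-- nodes.cleanup = nodes.vianuts(cleanup_nut) -- takes and returns a userdata node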
+
+-- for k, v in next, nuts do
+-- if type(v) == "function" then
+-- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
+-- local f = v
+-- nuts[k] = function(...) print("d",k,...) return f(...) end
+-- end
+-- end
+-- end
+
+-- for k, v in next, nodes do
+-- if type(v) == "function" then
+-- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
+-- local f = v
+-- nodes[k] = function(...) print("n",k,...) return f(...) end
+-- end
+-- end
+-- end
+
+-- function nodes.insert_before(h,c,n)
+-- if c then
+-- if c == h then
+-- n_setfield(n,"next",h)
+-- n_setfield(n,"prev",nil)
+-- n_setfield(h,"prev",n)
+-- else
+-- local cp = n_getprev(c)
+-- n_setfield(n,"next",c)
+-- n_setfield(n,"prev",cp)
+-- if cp then
+-- n_setfield(cp,"next",n)
+-- end
+-- n_setfield(c,"prev",n)
+-- return h, n
+-- end
+-- end
+-- return n, n
+-- end
+
+-- function nodes.insert_after(h,c,n)
+-- if c then
+-- local cn = n_getnext(c)
+-- if cn then
+-- n_setfield(n,"next",cn)
+-- n_setfield(cn,"prev",n)
+-- else
+-- n_setfield(n,"next",nil)
+-- end
+-- n_setfield(c,"next",n)
+-- n_setfield(n,"prev",c)
+-- return h, n
+-- end
+-- return n, n
+-- end
+
+-- node mode variant; the locals below provide the accessors it expects
+
+local n_tail     = node.tail
+local n_getnext  = node.getnext or function(n) return n.next end
+local n_setfield = node.setfield or function(n,k,v) n[k] = v end
+
+function nodes.insert_list_after(h,c,n)
+ local t = n_tail(n)
+ if c then
+ local cn = n_getnext(c)
+ if cn then
+ n_setfield(t,"next",cn)
+ n_setfield(cn,"prev",t)
+ else
+ n_setfield(t,"next",nil)
+ end
+ n_setfield(c,"next",n)
+ n_setfield(n,"prev",c)
+ return h, n
+ end
+ return n, t
+end
+
+-- function nuts.insert_before(h,c,n)
+-- if c then
+-- if c == h then
+-- d_setfield(n,"next",h)
+-- d_setfield(n,"prev",nil)
+-- d_setfield(h,"prev",n)
+-- else
+-- local cp = d_getprev(c)
+-- d_setfield(n,"next",c)
+-- d_setfield(n,"prev",cp)
+-- if cp then
+-- d_setfield(cp,"next",n)
+-- end
+-- d_setfield(c,"prev",n)
+-- return h, n
+-- end
+-- end
+-- return n, n
+-- end
+
+-- function nuts.insert_after(h,c,n)
+-- if c then
+-- local cn = d_getnext(c)
+-- if cn then
+-- d_setfield(n,"next",cn)
+-- d_setfield(cn,"prev",n)
+-- else
+-- d_setfield(n,"next",nil)
+-- end
+-- d_setfield(c,"next",n)
+-- d_setfield(n,"prev",c)
+-- return h, n
+-- end
+-- return n, n
+-- end
+
+function nuts.insert_list_after(h,c,n)
+    local t = d_find_tail(n)
+ if c then
+ local cn = d_getnext(c)
+ if cn then
+ d_setfield(t,"next",cn)
+ d_setfield(cn,"prev",t)
+ else
+ d_setfield(t,"next",nil)
+ end
+ d_setfield(c,"next",n)
+ d_setfield(n,"prev",c)
+ return h, n
+ end
+ return n, t
+end
+
+-- test code only
+
+-- collectranges and mix
+
+local report = logs.reporter("sliding")
+
+local function message(detail,head,current,previous)
+ report("error: %s, current: %s:%s, previous: %s:%s, list: %s, text: %s",
+ detail,
+ nodecodes[d_getid(current)],
+ current,
+ nodecodes[d_getid(previous)],
+ previous,
+ nodes.idstostring(head),
+ nodes.listtoutf(head)
+ )
+ utilities.debugger.showtraceback(report)
+end
+
+local function warn()
+ report()
+ report("warning: the slide tracer is enabled")
+ report()
+ warn = false
+end
+
+local function tracedslide(head)
+ if head then
+ if warn then
+ warn()
+ end
+ local next = d_getnext(head)
+ if next then
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ message("unset",head,n,prev)
+ -- break
+ elseif p ~= prev then
+ message("wrong",head,n,prev)
+ -- break
+ end
+ prev = n
+ end
+ end
+ return d_slide(head)
+ end
+end
+
+local function nestedtracedslide(head,level) -- no sliding !
+ if head then
+ if warn then
+ warn()
+ end
+ local id = d_getid(head)
+ local next = d_getnext(head)
+ if next then
+ report("%whead:%s",level or 0,nodecodes[id])
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ message("unset",head,n,prev)
+ -- break
+ elseif p ~= prev then
+ message("wrong",head,n,prev)
+ -- break
+ end
+ prev = n
+ local id = d_getid(n)
+ if id == hlist_code or id == vlist_code then
+ nestedtracedslide(d_getlist(n),(level or 0) + 1)
+ end
+ end
+ elseif id == hlist_code or id == vlist_code then
+ report("%wlist:%s",level or 0,nodecodes[id])
+ nestedtracedslide(d_getlist(head),(level or 0) + 1)
+ end
+ -- return d_slide(head)
+ end
+end
+
+local function untracedslide(head)
+ if head then
+ if warn then
+ warn()
+ end
+ local next = d_getnext(head)
+ if next then
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ return "unset", d_getid(n)
+ elseif p ~= prev then
+ return "wrong", d_getid(n)
+ end
+ prev = n
+ end
+ end
+ return d_slide(head)
+ end
+end
+
+nuts.tracedslide = tracedslide
+nuts.untracedslide = untracedslide
+nuts.nestedtracedslide = nestedtracedslide
+
+-- nuts.slide = tracedslide
+
+-- this might move
+
+local propertydata = direct.get_properties_table and direct.get_properties_table()
+
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+if propertydata then
+
+ nodes.properties = {
+ data = propertydata,
+ }
+
+ direct.set_properties_mode(true,false)
+ -- direct.set_properties_mode(true,true)
+
+ -- experimental code with respect to copying attributes has been removed
+ -- as it doesn't pay of (most attributes are only accessed once anyway)
+
+ nuts.getprop = function(n,k)
+ local p = propertydata[n]
+ if p then
+ return p[k]
+ end
+ end
+
+ nuts.setprop = function(n,k,v)
+ if v then
+ local p = propertydata[n]
+ if p then
+ p[k] = v
+ else
+ propertydata[n] = { [k] = v }
+ end
+ end
+ end
+
+ nodes.setprop = nodes.setproperty
+ nodes.getprop = nodes.getproperty
+
+else
+
+ -- for testing and simple cases
+
+ nuts.getprop = getattr
+ nuts.setprop = setattr
+
+    nodes.setprop = setattr
+    nodes.getprop = getattr
+
+end
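+
+-- Usage sketch (illustration only, the key is made up): when the properties
+-- table is available, arbitrary Lua values can be hung onto a node without
+-- using up an attribute register.
+--
+-- local n = tonut(somenode)
+-- nuts.setprop(n,"origin",{ file = "example" })
+-- local t = nuts.getprop(n,"origin") -- t.file == "example"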
diff --git a/Master/texmf-dist/tex/context/base/node-ppt.lua b/Master/texmf-dist/tex/context/base/node-ppt.lua
new file mode 100644
index 00000000000..c8cba8566b3
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/node-ppt.lua
@@ -0,0 +1,476 @@
+if not modules then modules = { } end modules ['node-ppt'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is all very experimental and likely to change.
+
+local next, type, unpack, load = next, type, table.unpack, load
+
+local serialize = table.serialize
+local formatters = string.formatters
+
+local report = logs.reporter("properties")
+local report_setting = logs.reporter("properties","setting")
+local trace_setting = false trackers.register("properties.setting", function(v) trace_setting = v end)
+
+-- report("using experimental properties")
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local flushnode = nuts.flush
+local removenode = nuts.remove
+local traverse = nuts.traverse
+local traverse_id = nuts.traverse_id
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local userdefined_code = whatsitcodes.userdefined
+local localpar_code = whatsitcodes.localpar
+
+local nodepool = nodes.pool
+local new_usernumber = nodepool.usernumber
+
+local nutpool = nuts.pool
+local nut_usernumber = nutpool.usernumber
+
+local variables = interfaces.variables
+local v_before = variables.before
+local v_after = variables.after
+local v_here = variables.here
+
+local cache = { }
+local nofslots = 0
+local property_id = nodepool.userids["property"]
+
+local properties = nodes.properties if not properties then return end -- temp
+local propertydata = properties.data
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+if not propertydata then
+ return
+end
+
+-- management
+
+local function register(where,data,...)
+ if not data then
+ data = where
+ where = v_after
+ end
+ if data then
+ local data = { where, data, ... }
+ nofslots = nofslots + 1
+ if nofslots > 1 then
+ cache[nofslots] = data
+ else
+ -- report("restarting attacher")
+ cache = { data } -- also forces collection
+ end
+ return new_usernumber(property_id,nofslots)
+ end
+end
+
+local writenode = node.write
+local flushnode = context.flushnode
+
+function commands.deferredproperty(...)
+-- context(register(...))
+ flushnode(register(...))
+end
+
+
+function commands.immediateproperty(...)
+ writenode(register(...))
+end
+
+commands.attachproperty = commands.deferredproperty
+
+local actions = { } properties.actions = actions
+
+table.setmetatableindex(actions,function(t,k)
+ report("unknown property action %a",k)
+ local v = function() end
+ return v
+end)
+
+local f_delayed = formatters["return function(target,head,where,propdata,parent) %s end"]
+local f_immediate = formatters["return function(target,head,where,propdata) %s end"]
+
+local nofdelayed = 0 -- better is to keep track of it per page ... we can have deleted nodes with properties
+
+function actions.delayed(target,head,where,propdata,code,...) -- this one is used at the tex end
+-- local kind = type(code)
+-- if kind == "string" then
+-- code, err = load(f_delayed(code))
+-- if code then
+-- code = code()
+-- end
+-- elseif kind ~= "function" then
+-- code = nil
+-- end
+ if code then
+ local delayed = propdata.delayed
+ if delayed then
+ delayed[#delayed+1] = { where, code, ... }
+ else
+ propdata.delayed = { { where, code, ... } }
+ nofdelayed = nofdelayed + 1
+ end
+ end
+end
+
+function actions.fdelayed(target,head,where,propdata,code,...) -- this one is used at the tex end
+-- local kind = type(code)
+-- if kind == "string" then
+-- code, err = load(f_delayed(code))
+-- if code then
+-- code = code()
+-- end
+-- elseif kind ~= "function" then
+-- code = nil
+-- end
+ if code then
+ local delayed = propdata.delayed
+ if delayed then
+ delayed[#delayed+1] = { false, code, ... }
+ else
+ propdata.delayed = { { false, code, ... } }
+ nofdelayed = nofdelayed + 1
+ end
+ end
+end
+
+function actions.immediate(target,head,where,propdata,code,...) -- this one is used at the tex end
+ local kind = type(code)
+ if kind == "string" then
+ local f = f_immediate(code)
+ local okay, err = load(f)
+ if okay then
+ local h = okay()(target,head,where,propdata,...)
+ if h and h ~= head then
+ return h
+ end
+ end
+ elseif kind == "function" then
+ local h = code()(target,head,where,propdata,...)
+ if h and h ~= head then
+ return h
+ end
+ end
+end
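+
+-- To make the string case above concrete (sketch, not part of the module): a
+-- string action is wrapped by f_immediate into a chunk that returns a function,
+-- so loading and calling the chunk yields the actual handler.
+--
+-- local chunk   = load(f_immediate("return head")) -- "return function(target,head,where,propdata) return head end"
+-- local handler = chunk and chunk()
+-- -- handler(target,head,where,propdata) then behaves like an inline action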
+
+-- another experiment (a table or function closure are equally efficient); a function
+-- is easier when we want to experiment with different (compatible) implementations
+
+-- function nodes.nuts.pool.deferredfunction(...)
+-- nofdelayed = nofdelayed + 1
+-- local n = nut_usernumber(property_id,0)
+-- propertydata[n] = { deferred = { ... } }
+-- return n
+-- end
+
+-- function nodes.nuts.pool.deferredfunction(f)
+-- nofdelayed = nofdelayed + 1
+-- local n = nut_usernumber(property_id,0)
+-- propertydata[n] = { deferred = f }
+-- return n
+-- end
+
+-- maybe actions will get parent too
+
+local function delayed(head,parent) -- direct based
+ for target in traverse(head) do
+ local p = propertydata[target]
+ if p then
+ -- local deferred = p.deferred -- kind of late lua (but too soon as we have no access to pdf.h/v)
+ -- if deferred then
+ -- -- if #deferred > 0 then
+ -- -- deferred[1](unpack(deferred,2))
+ -- -- else
+ -- -- deferred[1]()
+ -- -- end
+ -- deferred()
+ -- p.deferred = false
+ -- if nofdelayed == 1 then
+ -- nofdelayed = 0
+ -- return head
+ -- else
+ -- nofdelayed = nofdelayed - 1
+ -- end
+ -- else
+ local delayed = p.delayed
+ if delayed then
+ for i=1,#delayed do
+ local d = delayed[i]
+ local code = d[2]
+ local kind = type(code)
+ if kind == "string" then
+ code, err = load(f_delayed(code))
+ if code then
+ code = code()
+ end
+ end
+ local where = d[1]
+ if where then
+ local h = code(target,where,head,p,parent,unpack(d,3)) -- target where propdata head parent
+ if h and h ~= head then
+ head = h
+ end
+ else
+ code(unpack(d,3))
+ end
+ end
+ p.delayed = nil
+ if nofdelayed == 1 then
+ nofdelayed = 0
+ return head
+ else
+ nofdelayed = nofdelayed - 1
+ end
+ end
+ -- end
+ end
+ local id = getid(target)
+ if id == hlist_code or id == vlist_code then
+ local list = getlist(target)
+ if list then
+ local done = delayed(list,parent)
+ if done then
+ setfield(target,"list",done)
+ end
+ if nofdelayed == 0 then
+ return head
+ end
+ end
+ else
+ -- maybe also some more lists? but we will only use this for some
+ -- special cases .. who knows
+ end
+ end
+ return head
+end
+
+function properties.delayed(head) --
+ if nofdelayed > 0 then
+ -- if next(propertydata) then
+ starttiming(properties)
+ head = delayed(tonut(head))
+ stoptiming(properties)
+ return tonode(head), true -- done in shipout anyway
+ -- else
+ -- delayed = 0
+ -- end
+ end
+ return head, false
+end
+
+-- more explicit ones too
+
+local anchored = {
+ [v_before] = function(n)
+ while n do
+ n = getprev(n)
+            if getid(n) == whatsit_code and getsubtype(n) == userdefined_code and getfield(n,"user_id") == property_id then
+ -- continue
+ else
+ return n
+ end
+ end
+ end,
+ [v_after] = function(n)
+ while n do
+ n = getnext(n)
+ if getid(n) == whatsit_code then
+ local subtype = getsubtype(n)
+ if (subtype == userdefined_code and getfield(n,"user_id") == property_id) then
+ -- continue
+ elseif subtype == localpar_code then
+ -- continue .. can't happen anyway as we cannot write
+ else
+ return n
+ end
+ else
+ return n
+ end
+ end
+ end,
+ [v_here] = function(n)
+ -- todo
+ end,
+}
+
+table.setmetatableindex(anchored,function(t,k)
+    local v = anchored[v_after]
+ t[k] = v
+ return v
+end)
+
+function properties.attach(head)
+
+ if nofslots <= 0 then
+ return head, false
+ end
+
+ local done = false
+ local last = nil
+ local head = tonut(head)
+
+ starttiming(properties)
+
+ for source in traverse_id(whatsit_code,head) do
+ if getsubtype(source) == userdefined_code then
+ if last then
+ removenode(head,last,true)
+ last = nil
+ end
+ if getfield(source,"user_id") == property_id then
+ local slot = getfield(source,"value")
+ local data = cache[slot]
+ if data then
+ cache[slot] = nil
+ local where = data[1]
+ local target = anchored[where](source)
+ if target then
+ local first = data[2]
+ local method = type(first)
+ local p_target = propertydata[target]
+ local p_source = propertydata[source]
+ if p_target then
+ if p_source then
+ for k, v in next, p_source do
+ p_target[k] = v
+ end
+ end
+ if method == "table" then
+ for k, v in next, first do
+ p_target[k] = v
+ end
+ elseif method == "function" then
+ first(target,head,where,p_target,unpack(data,3))
+ elseif method == "string" then
+ actions[first](target,head,where,p_target,unpack(data,3))
+ end
+ elseif p_source then
+ if method == "table" then
+ propertydata[target] = p_source
+ for k, v in next, first do
+ p_source[k] = v
+ end
+ elseif method == "function" then
+ propertydata[target] = p_source
+ first(target,head,where,p_source,unpack(data,3))
+ elseif method == "string" then
+ propertydata[target] = p_source
+ actions[first](target,head,where,p_source,unpack(data,3))
+ end
+ else
+ if method == "table" then
+ propertydata[target] = first
+ elseif method == "function" then
+ local t = { }
+ propertydata[target] = t
+ first(target,head,where,t,unpack(data,3))
+ elseif method == "string" then
+ local t = { }
+ propertydata[target] = t
+ actions[first](target,head,where,t,unpack(data,3))
+ end
+ end
+ if trace_setting then
+ report_setting("node %i, id %s, data %s",
+ target,nodecodes[getid(target)],serialize(propertydata[target],false))
+ end
+ end
+ if nofslots == 1 then
+ nofslots = 0
+ last = source
+ break
+ else
+ nofslots = nofslots - 1
+ end
+ end
+ last = source
+ end
+ end
+ end
+
+ if last then
+ removenode(head,last,true)
+ end
+
+ stoptiming(properties)
+
+ return head, done
+
+end
+
+local tasks = nodes.tasks
+
+-- maybe better hard coded in-place
+
+-- tasks.prependaction("processors","before","nodes.properties.attach")
+-- tasks.appendaction("shipouts","normalizers","nodes.properties.delayed")
+
+statistics.register("properties processing time", function()
+ return statistics.elapsedseconds(properties)
+end)
+
+-- only for development
+
+-- local function show(head,level,report)
+-- for target in traverse(head) do
+-- local p = propertydata[target]
+-- if p then
+-- report("level %i, node %i, id %s, data %s",
+-- level,target,nodecodes[getid(target)],serialize(propertydata[target],false))
+-- end
+-- local id = getid(target)
+-- if id == hlist_code or id == vlist_code then
+-- local list = getlist(target)
+-- if list then
+-- show(list,level+1,report)
+-- end
+-- else
+-- -- maybe more lists
+-- end
+-- end
+-- return head, false
+-- end
+--
+-- local report_shipout = logs.reporter("properties","shipout")
+-- local report_processors = logs.reporter("properties","processors")
+--
+-- function properties.showshipout (head) return tonode(show(tonut(head),1,report_shipout )), true end
+-- function properties.showprocessors(head) return tonode(show(tonut(head),1,report_processors)), true end
+--
+-- tasks.prependaction("shipouts","before","nodes.properties.showshipout")
+-- tasks.disableaction("shipouts","nodes.properties.showshipout")
+--
+-- trackers.register("properties.shipout",function(v)
+-- tasks.setaction("shipouts","nodes.properties.showshipout",v)
+-- end)
+--
+-- tasks.appendaction ("processors","after","nodes.properties.showprocessors")
+-- tasks.disableaction("processors","nodes.properties.showprocessors")
+--
+-- trackers.register("properties.processors",function(v)
+-- tasks.setaction("processors","nodes.properties.showprocessors",v)
+-- end)
diff --git a/Master/texmf-dist/tex/context/base/node-pro.lua b/Master/texmf-dist/tex/context/base/node-pro.lua
index 60f2d8a720e..2cc00601c77 100644
--- a/Master/texmf-dist/tex/context/base/node-pro.lua
+++ b/Master/texmf-dist/tex/context/base/node-pro.lua
@@ -13,15 +13,15 @@ local trace_callbacks = false trackers.register("nodes.callbacks", function(v)
local report_nodes = logs.reporter("nodes","processors")
-local nodes, node = nodes, node
+local nodes = nodes
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local tasks = nodes.tasks
+local nuts = nodes.nuts
-local free_node = node.free
-local first_glyph = node.first_glyph or node.first_character
-local has_attribute = node.has_attribute
+local first_glyph = nodes.first_glyph
+local has_glyph = nodes.has_glyph
nodes.processors = nodes.processors or { }
local processors = nodes.processors
@@ -31,47 +31,57 @@ local processors = nodes.processors
local actions = tasks.actions("processors")
-local n = 0
+do
-local function reconstruct(head) -- we probably have a better one
- local t, n, h = { }, 0, head
- while h do
+ local tonut = nuts.tonut
+ local getid = nuts.getid
+ local getchar = nuts.getchar
+ local getnext = nuts.getnext
+
+ local n = 0
+
+ local function reconstruct(head) -- we probably have a better one
+ local t, n, h = { }, 0, tonut(head)
+ while h do
+ n = n + 1
+ local id = getid(h)
+ if id == glyph_code then -- todo: disc etc
+ t[n] = utfchar(getchar(h))
+ else
+ t[n] = "[]"
+ end
+ h = getnext(h)
+ end
+ return concat(t)
+ end
+
+ local function tracer(what,state,head,groupcode,before,after,show)
+ if not groupcode then
+ groupcode = "unknown"
+ elseif groupcode == "" then
+ groupcode = "mvl"
+ end
n = n + 1
- local id = h.id
- if id == glyph_code then -- todo: disc etc
- t[n] = utfchar(h.char)
+ if show then
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
else
- t[n] = "[]"
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
end
- h = h.next
end
- return concat(t)
-end
-local function tracer(what,state,head,groupcode,before,after,show)
- if not groupcode then
- groupcode = "unknown"
- elseif groupcode == "" then
- groupcode = "mvl"
- end
- n = n + 1
- if show then
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
- else
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
- end
-end
+ processors.tracer = tracer
-processors.tracer = tracer
+end
processors.enabled = true -- this will become a proper state (like trackers)
-function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction)
- local first, found = first_glyph(head) -- they really need to be glyphs
+function processors.pre_linebreak_filter(head,groupcode) -- ,size,packtype,direction
+ -- local first, found = first_glyph(head) -- they really need to be glyphs
+ local found = has_glyph(head)
if found then
if trace_callbacks then
local before = nodes.count(head,true)
- local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
+ local head, done = actions(head,groupcode) -- ,size,packtype,direction
local after = nodes.count(head,true)
if done then
tracer("pre_linebreak","changed",head,groupcode,before,after,true)
@@ -80,7 +90,7 @@ function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction)
end
return done and head or true
else
- local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
+ local head, done = actions(head,groupcode) -- ,size,packtype,direction
return done and head or true
end
elseif trace_callbacks then
@@ -94,7 +104,8 @@ local enabled = true
function processors.hpack_filter(head,groupcode,size,packtype,direction)
if enabled then
- local first, found = first_glyph(head) -- they really need to be glyphs
+ -- local first, found = first_glyph(head) -- they really need to be glyphs
+ local found = has_glyph(head)
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -118,15 +129,36 @@ function processors.hpack_filter(head,groupcode,size,packtype,direction)
return true
end
-local hpack = node.hpack
+do
+
+ local setfield = nodes.setfield
+ local hpack = nodes.hpack
+
+ function nodes.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ setfield(hp,"prev",nil)
+ setfield(hp,"next",nil)
+ enabled = true
+ return hp, b
+ end
+
+end
+
+do
+
+ local setfield = nuts.setfield
+ local hpack = nuts.hpack
+
+ function nuts.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ setfield(hp,"prev",nil)
+ setfield(hp,"next",nil)
+ enabled = true
+ return hp, b
+ end
-function nodes.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- hp.prev = nil
- hp.next = nil
- enabled = true
- return hp, b
end
callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)")
diff --git a/Master/texmf-dist/tex/context/base/node-ref.lua b/Master/texmf-dist/tex/context/base/node-ref.lua
index 09e066434a6..c55db4ea36a 100644
--- a/Master/texmf-dist/tex/context/base/node-ref.lua
+++ b/Master/texmf-dist/tex/context/base/node-ref.lua
@@ -16,69 +16,82 @@ if not modules then modules = { } end modules ['node-ref'] = {
-- is grouplevel still used?
-local format = string.format
+local attributes, nodes, node = attributes, nodes, node
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+local allocate = utilities.storage.allocate
+local mark = utilities.storage.mark
-local cleanupreferences, cleanupdestinations = false, true
+local nodeinjections = backends.nodeinjections
+local codeinjections = backends.codeinjections
-local attributes, nodes, node = attributes, nodes, node
+local cleanupreferences = false
+local cleanupdestinations = true
-local nodeinjections = backends.nodeinjections
-local codeinjections = backends.codeinjections
+local transparencies = attributes.transparencies
+local colors = attributes.colors
+local references = structures.references
+local tasks = nodes.tasks
-local transparencies = attributes.transparencies
-local colors = attributes.colors
-local references = structures.references
-local tasks = nodes.tasks
+local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
+local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
-local hpack_list = node.hpack
-local list_dimensions = node.dimensions
+local report_reference = logs.reporter("backend","references")
+local report_destination = logs.reporter("backend","destinations")
+local report_area = logs.reporter("backend","areas")
--- current.glue_set current.glue_sign
+local nuts = nodes.nuts
+local nodepool = nuts.pool
-local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
-local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
+local tonode = nuts.tonode
+local tonut = nuts.tonut
-local report_reference = logs.reporter("backend","references")
-local report_destination = logs.reporter("backend","destinations")
-local report_area = logs.reporter("backend","areas")
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-local whatcodes = nodes.whatcodes
-local listcodes = nodes.listcodes
+local hpack_list = nuts.hpack
+local list_dimensions = nuts.dimensions
+local traverse = nuts.traverse
+local find_node_tail = nuts.tail
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-local whatsit_code = nodecodes.whatsit
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local whatcodes = nodes.whatcodes
+local listcodes = nodes.listcodes
-local leftskip_code = skipcodes.leftskip
-local rightskip_code = skipcodes.rightskip
-local parfillskip_code = skipcodes.parfillskip
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glue_code = nodecodes.glue
+local whatsit_code = nodecodes.whatsit
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
+local leftskip_code = skipcodes.leftskip
+local rightskip_code = skipcodes.rightskip
+local parfillskip_code = skipcodes.parfillskip
-local line_code = listcodes.line
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
-local nodepool = nodes.pool
+local line_code = listcodes.line
-local new_kern = nodepool.kern
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
-local traverse = node.traverse
-local find_node_tail = node.tail or node.slide
-local tosequence = nodes.tosequence
+local tosequence = nodes.tosequence
-- local function dimensions(parent,start,stop)
--- stop = stop and stop.next
+-- stop = stop and getnext(stop)
-- if parent then
-- if stop then
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
+-- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,stop)
-- else
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
+-- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign",getfield(parent,"glue_order"),start)
-- end
-- else
-- if stop then
@@ -93,9 +106,9 @@ local tosequence = nodes.tosequence
local function dimensions(parent,start,stop)
if parent then
- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next)
+ return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,stop and getnext(stop))
else
- return list_dimensions(start,stop and stop.next)
+ return list_dimensions(start,stop and getnext(stop))
end
end
@@ -110,28 +123,27 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
if result and resolved then
if head == first then
if trace_backend then
- report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","head",
+ reference,pardir or "---",txtdir or "---",tosequence(first,last,true),width,height,depth,resolved)
end
- result.next = first
- first.prev = result
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
return result, last
else
if trace_backend then
- report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","middle",
+ reference,pardir or "---",txtdir or "---",tosequence(first,last,true),width,height,depth,resolved)
end
- local prev = first.prev
+ local prev = getprev(first)
if prev then
- result.next = first
- result.prev = prev
- prev.next = result
- first.prev = result
- else
- result.next = first
- first.prev = result
- end
- if first == head.next then
- head.next = result -- hm, weird
+ setfield(prev,"next",result)
+ setfield(result,"prev",prev)
end
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+-- if first == getnext(head) then
+-- setfield(head,"next",result) -- hm, weird
+-- end
return head, last
end
else
@@ -140,9 +152,9 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
end
local function inject_list(id,current,reference,make,stack,pardir,txtdir)
- local width, height, depth, correction = current.width, current.height, current.depth, 0
+ local width, height, depth, correction = getfield(current,"width"), getfield(current,"height"), getfield(current,"depth"), 0
local moveright = false
- local first = current.list
+ local first = getlist(current)
if id == hlist_code then -- box_code line_code
-- can be either an explicit hbox or a line and there is no way
-- to recognize this; anyway only if ht/dp (then inline)
@@ -150,17 +162,17 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
if first then
if sr and sr[2] then
local last = find_node_tail(first)
- if last.id == glue_code and last.subtype == rightskip_code then
- local prev = last.prev
- moveright = first.id == glue_code and first.subtype == leftskip_code
- if prev and prev.id == glue_code and prev.subtype == parfillskip_code then
- width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it
+ if getid(last) == glue_code and getsubtype(last) == rightskip_code then
+ local prev = getprev(last)
+ moveright = getid(first) == glue_code and getsubtype(first) == leftskip_code
+ if prev and getid(prev) == glue_code and getsubtype(prev) == parfillskip_code then
+ width = dimensions(current,first,getprev(prev)) -- maybe not current as we already take care of it
else
- if moveright and first.writable then
- width = width - first.spec.stretch*current.glue_set * current.glue_sign
+ if moveright and getfield(first,"writable") then
+ width = width - getfield(getfield(first,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
end
- if last.writable then
- width = width - last.spec.stretch*current.glue_set * current.glue_sign
+ if getfield(last,"writable") then
+ width = width - getfield(getfield(last,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
end
end
end
@@ -182,22 +194,25 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
-- todo: only when width is ok
if result and resolved then
if trace_backend then
- report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
+ report_area("%s: %04i %s %s %s: w=%p, h=%p, d=%p, c=%S","box",
+ reference,pardir or "---",txtdir or "----","[]",width,height,depth,resolved)
end
if not first then
- current.list = result
+ setfield(current,"list",result)
elseif moveright then -- brr no prevs done
-- result after first
- local n = first.next
- result.next = n
- first.next = result
- result.prev = first
- if n then n.prev = result end
+ local n = getnext(first)
+ setfield(result,"next",n)
+ setfield(first,"next",result)
+ setfield(result,"prev",first)
+ if n then
+ setfield(n,"prev",result)
+ end
else
-- first after result
- result.next = first
- first.prev = result
- current.list = result
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+ setfield(current,"list",result)
end
end
end
@@ -210,45 +225,57 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
pardir = pardir or "==="
txtdir = txtdir or "==="
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local r = current[attribute]
- -- somehow reference is true so the following fails (second one not done) in
- -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
- -- so let's wait till this fails again
- -- if not reference and r and (not skip or r > skip) then -- > or ~=
- if r and (not skip or r > skip) then -- > or ~=
- inject_list(id,current,r,make,stack,pardir,txtdir)
- end
+ local r = getattr(current,attribute)
+ -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
+ -- test \goto{\TeX}[page(2)] test \gotobox{\hbox {x} \hbox {x}}[page(2)]
+ -- if r and (not skip or r > skip) then -- maybe no > test
+ -- inject_list(id,current,r,make,stack,pardir,txtdir)
+ -- end
if r then
+ if not reference then
+ reference, first, last, firstdir = r, current, current, txtdir
+ elseif r == reference then
+ -- same link
+ last = current
+ elseif (done[reference] or 0) == 0 then
+ if not skip or r > skip then -- maybe no > test
+ head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+ reference, first, last, firstdir = nil, nil, nil, nil
+ end
+ else
+ reference, first, last, firstdir = r, current, current, txtdir
+ end
done[r] = (done[r] or 0) + 1
end
- local list = current.list
+ local list = getlist(current)
if list then
- local _
- current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ local h, ok
+ h, ok, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ setfield(current,"list",h)
end
if r then
done[r] = done[r] - 1
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- pardir = current.dir
+ pardir = getfield(current,"dir")
elseif subtype == dir_code then
- txtdir = current.dir
+ txtdir = getfield(current,"dir")
end
- elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
+ elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
--
else
- local r = current[attribute]
+ local r = getattr(current,attribute)
if not r then
-- just go on, can be kerns
elseif not reference then
reference, first, last, firstdir = r, current, current, txtdir
elseif r == reference then
last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
if not skip or r > skip then -- maybe no > test
head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
reference, first, last, firstdir = nil, nil, nil, nil
@@ -257,7 +284,7 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
reference, first, last, firstdir = r, current, current, txtdir
end
end
- current = current.next
+ current = getnext(current)
end
if reference and (done[reference] or 0) == 0 then
head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
@@ -272,32 +299,32 @@ local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir)
txtdir = txtdir or "==="
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local r = current[attribute]
+ local r = getattr(current,attribute)
if r and not done[r] then
done[r] = true
inject_list(id,current,r,make,stack,pardir,txtdir)
end
- local list = current.list
+ local list = getlist(current)
if list then
- current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
+ setfield(current,"list",(inject_area(list,attribute,make,stack,done,current,pardir,txtdir)))
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- pardir = current.dir
+ pardir = getfield(current,"dir")
elseif subtype == dir_code then
- txtdir = current.dir
+ txtdir = getfield(current,"dir")
end
else
- local r = current[attribute]
+ local r = getattr(current,attribute)
if r and not done[r] then
done[r] = true
head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
end
end
- current = current.next
+ current = getnext(current)
end
end
return head, true
@@ -305,12 +332,6 @@ end
-- tracing
-local nodepool = nodes.pool
-
-local new_rule = nodepool.rule
-local new_kern = nodepool.kern
-
-local set_attribute = node.set_attribute
local register_color = colors.register
local a_color = attributes.private('color')
@@ -347,35 +368,31 @@ local function colorize(width,height,depth,n,reference,what)
height = 65536/2
depth = height
end
- local rule = new_rule(width,height,depth)
- rule[a_colormodel] = 1 -- gray color model
- rule[a_color] = u_color
- rule[a_transparency] = u_transparency
+ local rule = new_rule(width,height,depth) -- todo: use tracer rule
+ setattr(rule,a_colormodel,1) -- gray color model
+ setattr(rule,a_color,u_color)
+ setattr(rule,a_transparency,u_transparency)
if width < 0 then
local kern = new_kern(width)
- rule.width = -width
- kern.next = rule
- rule.prev = kern
+ setfield(rule,"width",-width)
+ setfield(kern,"next",rule)
+ setfield(rule,"prev",kern)
return kern
else
return rule
end
end
-local nodepool = nodes.pool
-
-local new_kern = nodepool.kern
-
-local texattribute = tex.attribute
-local texcount = tex.count
-
-- references:
-local stack = { }
-local done = { }
-local attribute = attributes.private('reference')
-local nofreferences = 0
-local topofstack = 0
+local texsetattribute = tex.setattribute
+local texsetcount = tex.setcount
+
+local stack = { }
+local done = { }
+local attribute = attributes.private('reference')
+local nofreferences = 0
+local topofstack = 0
nodes.references = {
attribute = attribute,
@@ -390,8 +407,8 @@ local function setreference(h,d,r)
-- the preroll permits us to determine samepage (but delayed also has some advantages)
-- so some part of the backend work is already done here
stack[topofstack] = { r, h, d, codeinjections.prerollreference(r) }
- -- texattribute[attribute] = topofstack -- todo -> at tex end
- texcount.lastreferenceattribute = topofstack
+ -- texsetattribute(attribute,topofstack) -- todo -> at tex end
+ texsetcount("lastreferenceattribute",topofstack)
end
function references.get(n) -- not public so functionality can change
@@ -412,22 +429,26 @@ local function makereference(width,height,depth,reference)
end
local annot = nodeinjections.reference(width,height,depth,set)
if annot then
+annot = tonut(annot)
nofreferences = nofreferences + 1
local result, current
if trace_references then
local step = 65536
result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see separate links
- result.width = 0
+ setfield(result,"width",0)
current = result
end
if current then
- current.next = annot
+ setfield(current,"next",annot)
+ setfield(annot,"prev",current)
else
result = annot
end
references.registerpage(n)
result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
+ setfield(result,"width",0)
+ setfield(result,"height",0)
+ setfield(result,"depth",0)
if cleanupreferences then stack[reference] = nil end
return result, resolved
elseif trace_references then
@@ -438,9 +459,19 @@ local function makereference(width,height,depth,reference)
end
end
+-- function nodes.references.handler(head)
+-- if topofstack > 0 then
+-- return inject_areas(head,attribute,makereference,stack,done)
+-- else
+-- return head, false
+-- end
+-- end
+
function nodes.references.handler(head)
if topofstack > 0 then
- return inject_areas(head,attribute,makereference,stack,done)
+ head = tonut(head)
+ local head, done = inject_areas(head,attribute,makereference,stack,done)
+ return tonode(head), done
else
return head, false
end
@@ -484,34 +515,35 @@ local function makedestination(width,height,depth,reference)
step = 4*65536
width, height, depth = 5*step, 5*step, 0
end
- for n=1,#name do
- local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
- rule.width = 0
- if not result then
- result, current = rule, rule
- else
- current.next = rule
- rule.prev = current
- current = rule
- end
- width, height = width - step, height - step
+ local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
+ setfield(rule,"width",0)
+ if not result then
+ result, current = rule, rule
+ else
+ setfield(current,"next",rule)
+ setfield(rule,"prev",current)
+ current = rule
end
+ width, height = width - step, height - step
end
nofdestinations = nofdestinations + 1
- for n=1,#name do
- local annot = nodeinjections.destination(width,height,depth,name[n],view)
- if not result then
- result = annot
+ local annot = nodeinjections.destination(width,height,depth,name,view)
+ if annot then
+ annot = tonut(annot) -- obsolete soon
+ if result then
+ setfield(current,"next",annot)
+ setfield(annot,"prev",current)
else
- current.next = annot
- annot.prev = current
+ result = annot
end
current = find_node_tail(annot)
end
if result then
-- some internal error
result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
+ setfield(result,"width",0)
+ setfield(result,"height",0)
+ setfield(result,"depth",0)
end
if cleanupdestinations then stack[reference] = nil end
return result, resolved
@@ -520,14 +552,25 @@ local function makedestination(width,height,depth,reference)
end
end
+-- function nodes.destinations.handler(head)
+-- if topofstack > 0 then
+-- return inject_area(head,attribute,makedestination,stack,done) -- singular
+-- else
+-- return head, false
+-- end
+-- end
+
function nodes.destinations.handler(head)
if topofstack > 0 then
- return inject_area(head,attribute,makedestination,stack,done) -- singular
+ head = tonut(head)
+ local head, done = inject_areas(head,attribute,makedestination,stack,done)
+ return tonode(head), done
else
return head, false
end
end
+
-- will move
function references.mark(reference,h,d,view)
@@ -540,7 +583,7 @@ function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- to
-- unknown ref, just don't set it and issue an error
else
-- check
- set.highlight, set.newwindow,set.layer = highlight, newwindow, layer
+ set.highlight, set.newwindow, set.layer = highlight, newwindow, layer
setreference(h,d,set) -- sets attribute / todo: for set[*].error
end
end
@@ -573,7 +616,7 @@ end
statistics.register("interactive elements", function()
if nofreferences > 0 or nofdestinations > 0 then
- return format("%s references, %s destinations",nofreferences,nofdestinations)
+ return string.format("%s references, %s destinations",nofreferences,nofdestinations)
else
return nil
end
diff --git a/Master/texmf-dist/tex/context/base/node-res.lua b/Master/texmf-dist/tex/context/base/node-res.lua
index 768aac404e2..1a9d6f02e2e 100644
--- a/Master/texmf-dist/tex/context/base/node-res.lua
+++ b/Master/texmf-dist/tex/context/base/node-res.lua
@@ -18,13 +18,8 @@ local report_nodes = logs.reporter("nodes","housekeeping")
local nodes, node = nodes, node
-local copy_node = node.copy
-local free_node = node.free
-local free_list = node.flush_list
-local new_node = node.new
-
nodes.pool = nodes.pool or { }
-local pool = nodes.pool
+local nodepool = nodes.pool
local whatsitcodes = nodes.whatsitcodes
local skipcodes = nodes.skipcodes
@@ -35,372 +30,537 @@ local glyph_code = nodecodes.glyph
local allocate = utilities.storage.allocate
+local texgetcount = tex.getcount
+
local reserved, nofreserved = { }, 0
-local function register_node(n)
- nofreserved = nofreserved + 1
- reserved[nofreserved] = n
- return n
-end
+-- user nodes
-pool.register = register_node
+local userids = allocate()
+local lastid = 0
-function pool.cleanup(nofboxes) -- todo
- if nodes.tracers.steppers then -- to be resolved
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
- end
- local nl, nr = 0, nofreserved
- for i=1,nofreserved do
- local ri = reserved[i]
- -- if not (ri.id == glue_spec and not ri.is_writable) then
- free_node(reserved[i])
- -- end
+setmetatable(userids, {
+ __index = function(t,k)
+ if type(k) == "string" then
+ lastid = lastid + 1
+ rawset(userids,lastid,k)
+ rawset(userids,k,lastid)
+ return lastid
+ else
+ rawset(userids,k,k)
+ return k
+ end
+ end,
+ __call = function(t,k)
+ return t[k]
end
- if nofboxes then
- local tb = tex.box
- for i=0,nofboxes do
- local l = tb[i]
- if l then
- free_node(tb[i])
- nl = nl + 1
- end
+} )
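
The userids table set up above is a two-way registry: indexing it with an unknown string allocates the next number and records the mapping in both directions, unknown numbers simply map onto themselves, and calling the table behaves like indexing it. A minimal standalone sketch of the same pattern (the names here are illustrative, not taken from the sources):

local registry = { }
local lastid   = 0

setmetatable(registry, {
    __index = function(t,k)
        if type(k) == "string" then
            lastid = lastid + 1
            rawset(t,lastid,k)   -- number -> string
            rawset(t,k,lastid)   -- string -> number
            return lastid
        else
            rawset(t,k,k)        -- unknown numbers map onto themselves
            return k
        end
    end,
    __call = function(t,k)
        return t[k]
    end,
})

local num = registry["my id"]    -- 1, allocated on first use
local str = registry[num]        -- "my id", the reverse lookup
assert(registry("my id") == num) -- calling the table is the same as indexing it
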
+
+-- nuts overload
+
+local nuts = nodes.nuts
+local nutpool = { }
+nuts.pool = nutpool
+
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getbox = nuts.getbox
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getlist = nuts.getlist
+
+local copy_nut = nuts.copy
+local new_nut = nuts.new
+local free_nut = nuts.free
+
+local copy_node = nodes.copy
+local new_node = nodes.new
+
+-- at some point we could have a dual set (the overhead of tonut is not much larger than
+-- metatable associations at the lua/c end, especially if we also take assignments into account)
+
+-- table.setmetatableindex(nodepool,function(t,k,v)
+-- -- report_nodes("defining nodepool[%s] instance",k)
+-- local f = nutpool[k]
+-- local v = function(...)
+-- return tonode(f(...))
+-- end
+-- t[k] = v
+-- return v
+-- end)
+--
+-- -- we delay one step because that permits us a forward reference
+-- -- e.g. in pdfsetmatrix
+
+table.setmetatableindex(nodepool,function(t,k,v)
+ -- report_nodes("defining nodepool[%s] instance",k)
+ local v = function(...)
+ local f = nutpool[k]
+ local v = function(...)
+ return tonode(f(...))
end
+ t[k] = v
+ return v(...)
end
- reserved = { }
- nofreserved = 0
- return nr, nl, nofboxes -- can be nil
+ t[k] = v
+ return v
+end)
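
The active variant above delays resolving the nut constructor until the wrapper is first called, which (as the comment notes) keeps forward references such as pdfsetmatrix working; from the second call on, the cached wrapper goes straight to tonode(f(...)). A hedged, self-contained sketch of that delayed-lookup idea, with placeholder constructors standing in for real nuts:

local nutpool  = { }            -- may be filled after a wrapper has been requested
local nodepool = { }

setmetatable(nodepool, {
    __index = function(t,k)
        local v = function(...)
            local f = nutpool[k]          -- resolved at first call, not at definition time
            local wrapped = function(...)
                return "node:" .. f(...)  -- stands in for tonode(f(...))
            end
            t[k] = wrapped                -- cache the direct wrapper from now on
            return wrapped(...)
        end
        t[k] = v
        return v
    end,
})

local kern = nodepool.kern      -- the wrapper exists before nutpool.kern does
nutpool.kern = function(k) return "kern " .. k end
print(kern(65536))              -- "node:kern 65536"
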
+
+local function register_nut(n)
+ nofreserved = nofreserved + 1
+ reserved[nofreserved] = n
+ return n
end
-function pool.usage()
- local t = { }
- for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
- t[tag] = n
+local function register_node(n)
+ nofreserved = nofreserved + 1
+ if type(n) == "number" then -- isnut(n)
+ reserved[nofreserved] = n
+ else
+ reserved[nofreserved] = tonut(n)
end
- return t
+ return n
end
-local disc = register_node(new_node("disc"))
-local kern = register_node(new_node("kern",kerncodes.userkern))
-local fontkern = register_node(new_node("kern",kerncodes.fontkern))
-local penalty = register_node(new_node("penalty"))
-local glue = register_node(new_node("glue")) -- glue.spec = nil
-local glue_spec = register_node(new_node("glue_spec"))
-local glyph = register_node(new_node("glyph",0))
-local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
-local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
-local special = register_node(new_node("whatsit",whatsitcodes.special))
-local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
-local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44
-local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44
-local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44
-local left_margin_kern = register_node(new_node("margin_kern",0))
-local right_margin_kern = register_node(new_node("margin_kern",1))
-local lineskip = register_node(new_node("glue",skipcodes.lineskip))
-local baselineskip = register_node(new_node("glue",skipcodes.baselineskip))
-local leftskip = register_node(new_node("glue",skipcodes.leftskip))
-local rightskip = register_node(new_node("glue",skipcodes.rightskip))
-local temp = register_node(new_node("temp",0))
-local noad = register_node(new_node("noad"))
+nodepool.userids = userids
+nodepool.register = register_node
+
+nutpool.userids = userids
+nutpool.register = register_node -- could be register_nut
+
+-- so far
+
+local disc = register_nut(new_nut("disc"))
+local kern = register_nut(new_nut("kern",kerncodes.userkern))
+local fontkern = register_nut(new_nut("kern",kerncodes.fontkern))
+local penalty = register_nut(new_nut("penalty"))
+local glue = register_nut(new_nut("glue")) -- glue.spec = nil
+local glue_spec = register_nut(new_nut("glue_spec"))
+local glyph = register_nut(new_nut("glyph",0))
+local textdir = register_nut(new_nut("whatsit",whatsitcodes.dir))
+local latelua = register_nut(new_nut("whatsit",whatsitcodes.latelua))
+local special = register_nut(new_nut("whatsit",whatsitcodes.special))
+local user_n = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_n,"type",100) -- 44
+local user_l = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_l,"type",110) -- 44
+local user_s = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_s,"type",115) -- 44
+local user_t = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_t,"type",116) -- 44
+local left_margin_kern = register_nut(new_nut("margin_kern",0))
+local right_margin_kern = register_nut(new_nut("margin_kern",1))
+local lineskip = register_nut(new_nut("glue",skipcodes.lineskip))
+local baselineskip = register_nut(new_nut("glue",skipcodes.baselineskip))
+local leftskip = register_nut(new_nut("glue",skipcodes.leftskip))
+local rightskip = register_nut(new_nut("glue",skipcodes.rightskip))
+local temp = register_nut(new_nut("temp",0))
+local noad = register_nut(new_nut("noad"))
-- the dir field needs to be set otherwise crash:
-local rule = register_node(new_node("rule")) rule .dir = "TLT"
-local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
-local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
-
-function pool.zeroglue(n)
- local s = n.spec
- return not writable or (
- s.width == 0
- and s.stretch == 0
- and s.shrink == 0
- and s.stretch_order == 0
- and s.shrink_order == 0
- )
-end
-
-function pool.glyph(fnt,chr)
- local n = copy_node(glyph)
- if fnt then n.font = fnt end
- if chr then n.char = chr end
+local rule = register_nut(new_nut("rule")) setfield(rule, "dir","TLT")
+local hlist = register_nut(new_nut("hlist")) setfield(hlist,"dir","TLT")
+local vlist = register_nut(new_nut("vlist")) setfield(vlist,"dir","TLT")
+
+function nutpool.zeroglue(n)
+ local s = getfield(n,"spec")
+ return
+ getfield(s,"width") == 0 and
+ getfield(s,"stretch") == 0 and
+ getfield(s,"shrink") == 0 and
+ getfield(s,"stretch_order") == 0 and
+ getfield(s,"shrink_order") == 0
+end
+
+function nutpool.glyph(fnt,chr)
+ local n = copy_nut(glyph)
+ if fnt then setfield(n,"font",fnt) end
+ if chr then setfield(n,"char",chr) end
return n
end
-function pool.penalty(p)
- local n = copy_node(penalty)
- n.penalty = p
+function nutpool.penalty(p)
+ local n = copy_nut(penalty)
+ setfield(n,"penalty",p)
return n
end
-function pool.kern(k)
- local n = copy_node(kern)
- n.kern = k
+function nutpool.kern(k)
+ local n = copy_nut(kern)
+ setfield(n,"kern",k)
return n
end
-function pool.fontkern(k)
- local n = copy_node(fontkern)
- n.kern = k
+function nutpool.fontkern(k)
+ local n = copy_nut(fontkern)
+ setfield(n,"kern",k)
return n
end
-function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
+function nutpool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
+ local s = copy_nut(glue_spec)
+ if width then setfield(s,"width",width) end
+ if stretch then setfield(s,"stretch",stretch) end
+ if shrink then setfield(s,"shrink",shrink) end
+ if stretch_order then setfield(s,"stretch_order",stretch_order) end
+ if shrink_order then setfield(s,"shrink_order",shrink_order) end
return s
end
local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order)
- local n = copy_node(skip)
+ local n = copy_nut(skip)
if not width then
-- no spec
elseif width == false or tonumber(width) then
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
- n.spec = s
+ local s = copy_nut(glue_spec)
+ if width then setfield(s,"width",width) end
+ if stretch then setfield(s,"stretch",stretch) end
+ if shrink then setfield(s,"shrink",shrink) end
+ if stretch_order then setfield(s,"stretch_order",stretch_order) end
+ if shrink_order then setfield(s,"shrink_order",shrink_order) end
+ setfield(n,"spec",s)
else
-- shared
- n.spec = copy_node(width)
+ setfield(n,"spec",copy_nut(width))
end
return n
end
-function pool.stretch(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
+function nutpool.stretch(a,b)
+ local n = copy_nut(glue)
+ local s = copy_nut(glue_spec)
if b then
- s.stretch = a
- s.stretch_order = b
+ setfield(s,"stretch",a)
+ setfield(s,"stretch_order",b)
else
- s.stretch = 1
- s.stretch_order = a or 1
+ setfield(s,"stretch",1)
+ setfield(s,"stretch_order",a or 1)
end
- n.spec = s
+ setfield(n,"spec",s)
return n
end
-function pool.shrink(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
+function nutpool.shrink(a,b)
+ local n = copy_nut(glue)
+ local s = copy_nut(glue_spec)
if b then
- s.shrink = a
- s.shrink_order = b
+ setfield(s,"shrink",a)
+ setfield(s,"shrink_order",b)
else
- s.shrink = 1
- s.shrink_order = a or 1
+ setfield(s,"shrink",1)
+ setfield(s,"shrink_order",a or 1)
end
- n.spec = s
+ setfield(n,"spec",s)
return n
end
-
-function pool.glue(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.glue(width,stretch,shrink,stretch_order,shrink_order)
return someskip(glue,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.negatedglue(glue)
+ local n = copy_nut(glue)
+ local s = copy_nut(getfield(n,"spec"))
+ local width = getfield(s,"width")
+ local stretch = getfield(s,"stretch")
+ local shrink = getfield(s,"shrink")
+ if width then setfield(s,"width", -width) end
+ if stretch then setfield(s,"stretch",-stretch) end
+ if shrink then setfield(s,"shrink", -shrink) end
+ setfield(n,"spec",s)
+ return n
+end
+
+function nutpool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.baselineskip(width,stretch,shrink)
+function nutpool.baselineskip(width,stretch,shrink)
return someskip(baselineskip,width,stretch,shrink)
end
-function pool.disc()
- return copy_node(disc)
+function nutpool.disc()
+ return copy_nut(disc)
end
-function pool.textdir(dir)
- local t = copy_node(textdir)
- t.dir = dir
+function nutpool.textdir(dir)
+ local t = copy_nut(textdir)
+ setfield(t,"dir",dir)
return t
end
-function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
- local n = copy_node(rule)
- if width then n.width = width end
- if height then n.height = height end
- if depth then n.depth = depth end
- if dir then n.dir = dir end
+function nutpool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
+ local n = copy_nut(rule)
+ if width then setfield(n,"width",width) end
+ if height then setfield(n,"height",height) end
+ if depth then setfield(n,"depth",depth) end
+ if dir then setfield(n,"dir",dir) end
return n
end
-if node.has_field(latelua,'string') then
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.string = code
+function nutpool.latelua(code)
+ local n = copy_nut(latelua)
+ setfield(n,"string",code)
+ return n
+end
+
+if context and _cldo_ then
+
+ -- a typical case where we have more nodes than nuts
+
+ local context = context
+
+ local f_cldo = string.formatters["_cldo_(%i)"]
+ local register = context.registerfunction
+
+ local latelua_node = register_node(new_node("whatsit",whatsitcodes.latelua))
+ local latelua_nut = register_nut (new_nut ("whatsit",whatsitcodes.latelua))
+
+ local setfield_node = nodes.setfield
+ local setfield_nut = nuts .setfield
+
+ function nodepool.lateluafunction(f)
+ local n = copy_node(latelua_node)
+ setfield_node(n,"string",f_cldo(register(f)))
return n
end
-else
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.data = code
+ function nutpool.lateluafunction(f)
+ local n = copy_nut(latelua_nut)
+ setfield_nut(n,"string",f_cldo(register(f)))
return n
end
+
+ -- when function in latelua:
+
+ -- function nodepool.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- return n
+ -- end
+ -- function nutpool.lateluafunction(f)
+ -- local n = copy_nut(latelua_nut)
+ -- setfield_nut(n,"string",f)
+ -- return n
+ -- end
+
+ local latefunction = nodepool.lateluafunction
+ local flushnode = context.flushnode
+
+ function context.lateluafunction(f)
+ flushnode(latefunction(f)) -- hm, quite some indirect calls
+ end
+
+ -- when function in latelua:
+
+ -- function context.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- flushnode(n)
+ -- end
+
+ -- local contextsprint = context.sprint
+ -- local ctxcatcodes = tex.ctxcatcodes
+ -- local storenode = context.storenode
+
+ -- when 0.79 is out:
+
+ -- function context.lateluafunction(f)
+ -- contextsprint(ctxcatcodes,"\\cldl",storenode(latefunction(f))," ")
+ -- end
+
+ -- when function in latelua:
+
+ -- function context.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- contextsprint(ctxcatcodes,"\\cldl",storenode(n)," ")
+ -- end
+
end
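
Because _cldo_ dispatches on a registered number, a Lua function can ride along in the node list without being serialized; the whatsit only carries the short "_cldo_(n)" call. A hedged usage sketch (the message and the realpageno counter are just examples, not part of this patch):

-- assumes a ConTeXt run where context.lateluafunction is defined as above
local function atshipout()
    -- executed when the backend processes the injected latelua whatsit
    texio.write_nl("log","demo: shipping out page " .. tex.getcount("realpageno"))
end

context.lateluafunction(atshipout)
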
-function pool.leftmarginkern(glyph,width)
- local n = copy_node(left_margin_kern)
+function nutpool.leftmarginkern(glyph,width)
+ local n = copy_nut(left_margin_kern)
if not glyph then
report_nodes("invalid pointer to left margin glyph node")
- elseif glyph.id ~= glyph_code then
+ elseif getid(glyph) ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
else
- n.glyph = glyph
+ setfield(n,"glyph",glyph)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
return n
end
-function pool.rightmarginkern(glyph,width)
- local n = copy_node(right_margin_kern)
+function nutpool.rightmarginkern(glyph,width)
+ local n = copy_nut(right_margin_kern)
if not glyph then
report_nodes("invalid pointer to right margin glyph node")
- elseif glyph.id ~= glyph_code then
+ elseif getid(glyph) ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
else
- n.glyph = glyph
+ setfield(n,"glyph",glyph)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
return n
end
-function pool.temp()
- return copy_node(temp)
+function nutpool.temp()
+ return copy_nut(temp)
end
-function pool.noad()
- return copy_node(noad)
+function nutpool.noad()
+ return copy_nut(noad)
end
-function pool.hlist()
- return copy_node(hlist)
-end
-
-function pool.vlist()
- return copy_node(vlist)
+function nutpool.hlist(list,width,height,depth)
+ local n = copy_nut(hlist)
+ if list then
+ setfield(n,"list",list)
+ end
+ if width then
+ setfield(n,"width",width)
+ end
+ if height then
+ setfield(n,"height",height)
+ end
+ if depth then
+ setfield(n,"depth",depth)
+ end
+ return n
end
---[[
-At some point we ran into a problem that the glue specification
-of the zeropoint dimension was overwritten when adapting a glue spec
-node. This is a side effect of glue specs being shared. After a
-couple of hours tracing and debugging Taco and I came to the
-conclusion that it made no sense to complicate the spec allocator
-and settled on a writable flag. This all is a side effect of the
-fact that some glues use reserved memory slots (with the zeropoint
-glue being a noticeable one). So, next we wrap this into a function
-and hide it for the user. And yes, LuaTeX now gives a warning as
-well.
-]]--
-
-function nodes.writable_spec(n) -- not pool
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- n.spec = spec
- elseif not spec.writable then
- spec = copy_node(spec)
- n.spec = spec
+function nutpool.vlist(list,width,height,depth)
+ local n = copy_nut(vlist)
+ if list then
+ setfield(n,"list",list)
end
- return spec
+ if width then
+ setfield(n,"width",width)
+ end
+ if height then
+ setfield(n,"height",height)
+ end
+ if depth then
+ setfield(n,"depth",depth)
+ end
+ return n
end
-- local num = userids["my id"]
-- local str = userids[num]
-local userids = allocate() pool.userids = userids
-local lastid = 0
-
-setmetatable(userids, {
- __index = function(t,k)
- if type(k) == "string" then
- lastid = lastid + 1
- rawset(userids,lastid,k)
- rawset(userids,k,lastid)
- return lastid
- else
- rawset(userids,k,k)
- return k
- end
- end,
- __call = function(t,k)
- return t[k]
- end
-} )
-
-function pool.usernumber(id,num)
- local n = copy_node(user_n)
+function nutpool.usernumber(id,num)
+ local n = copy_nut(user_n)
if num then
- n.user_id, n.value = id, num
+ setfield(n,"user_id",id)
+ setfield(n,"value",num)
elseif id then
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.userlist(id,list)
- local n = copy_node(user_l)
+function nutpool.userlist(id,list)
+ local n = copy_nut(user_l)
if list then
- n.user_id, n.value = id, list
+ setfield(n,"user_id",id)
+ setfield(n,"value",list)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.userstring(id,str)
- local n = copy_node(user_s)
+function nutpool.userstring(id,str)
+ local n = copy_nut(user_s)
if str then
- n.user_id, n.value = id, str
+ setfield(n,"user_id",id)
+ setfield(n,"value",str)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.usertokens(id,tokens)
- local n = copy_node(user_t)
+function nutpool.usertokens(id,tokens)
+ local n = copy_nut(user_t)
if tokens then
- n.user_id, n.value = id, tokens
+ setfield(n,"user_id",id)
+ setfield(n,"value",tokens)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.special(str)
- local n = copy_node(special)
- n.data = str
+function nutpool.special(str)
+ local n = copy_nut(special)
+ setfield(n,"data",str)
return n
end
+-- housekeeping
+
+local function cleanup(nofboxes) -- todo
+ if nodes.tracers.steppers then -- to be resolved
+ nodes.tracers.steppers.reset() -- todo: make a registration subsystem
+ end
+ local nl, nr = 0, nofreserved
+ for i=1,nofreserved do
+ local ri = reserved[i]
+ -- if not (getid(ri) == glue_spec and not getfield(ri,"is_writable")) then
+ free_nut(reserved[i])
+ -- end
+ end
+ if nofboxes then
+ for i=0,nofboxes do
+ local l = getbox(i)
+ if l then
+-- print(nodes.listtoutf(getlist(l)))
+ free_nut(l) -- also list ?
+ nl = nl + 1
+ end
+ end
+ end
+ reserved = { }
+ nofreserved = 0
+ return nr, nl, nofboxes -- can be nil
+end
+
+
+local function usage()
+ local t = { }
+ for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
+ t[tag] = n
+ end
+ return t
+end
+
+nutpool .cleanup = cleanup
+nodepool.cleanup = cleanup
+
+nutpool .usage = usage
+nodepool.usage = usage
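
Both pools now share the same housekeeping: prototypes created through register_nut stay reserved for the whole run, the constructors above only hand out copies of them, and cleanup frees the prototypes (and optionally the low box registers) at finalization time. A compressed sketch of that lifecycle, assuming a ConTeXt run and using an invented prototype name:

-- assumes nodes.nuts as set up in these files
local nuts     = nodes.nuts
local new_nut  = nuts.new
local copy_nut = nuts.copy
local free_nut = nuts.free
local setfield = nuts.setfield

local reserved, nofreserved = { }, 0

local function reserve(n)                 -- mirrors register_nut above
    nofreserved = nofreserved + 1
    reserved[nofreserved] = n
    return n
end

local mykern = reserve(new_nut("kern"))   -- one prototype, allocated once per run

local function somekern(k)                -- constructors only copy the prototype
    local n = copy_nut(mykern)
    setfield(n,"kern",k)
    return n
end

local function myfinalizer()              -- e.g. hooked in via lua.registerfinalizer
    for i=1,nofreserved do
        free_nut(reserved[i])
    end
    reserved, nofreserved = { }, 0
end
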
+
+-- end
+
statistics.register("cleaned up reserved nodes", function()
- return format("%s nodes, %s lists of %s", pool.cleanup(tex.count["c_syst_last_allocated_box"]))
+ return format("%s nodes, %s lists of %s", cleanup(texgetcount("c_syst_last_allocated_box")))
end) -- \topofboxstack
statistics.register("node memory usage", function() -- comes after cleanup !
return status.node_mem_usage
end)
-lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes")
+lua.registerfinalizer(cleanup, "cleanup reserved nodes")
diff --git a/Master/texmf-dist/tex/context/base/node-rul.lua b/Master/texmf-dist/tex/context/base/node-rul.lua
index 953beb18666..6f3bc9df9e6 100644
--- a/Master/texmf-dist/tex/context/base/node-rul.lua
+++ b/Master/texmf-dist/tex/context/base/node-rul.lua
@@ -13,12 +13,28 @@ if not modules then modules = { } end modules ['node-rul'] = {
local attributes, nodes, node = attributes, nodes, node
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local rule_code = nodecodes.rule
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local rule_code = nodecodes.rule
function nodes.striprange(first,last) -- todo: dir
if first and last then -- just to be sure
@@ -26,11 +42,11 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while first and first ~= last do
- local id = first.id
+ local id = getid(first)
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- first = first.next
+ first = getnext(first)
end
end
if not first then
@@ -39,13 +55,13 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while last and last ~= first do
- local id = last.id
+ local id = getid(last)
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
+ local prev = getprev(last) -- luatex < 0.70 has italic correction kern not prev'd
if prev then
- last = last.prev
+ last = prev
else
break
end
@@ -73,18 +89,21 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local striprange = nodes.striprange
-local list_dimensions = node.dimensions
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local list_dimensions = nuts.dimensions
+local hpack_nodes = nuts.hpack
-local hpack_nodes = node.hpack
+local striprange = nodes.striprange
local fontdata = fonts.hashes.identifiers
local variables = interfaces.variables
local dimenfactor = fonts.helpers.dimenfactor
local splitdimen = number.splitdimen
+local v_yes = variables.yes
+local v_foreground = variables.foreground
+
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
local whatcodes = nodes.whatcodes
@@ -108,7 +127,7 @@ local dir_code = whatcodes.dir
local kerning_code = kerncodes.kern
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
@@ -138,9 +157,9 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
local f, l, a, d, i, class
local continue, done, strip, level = false, false, true, -1
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code or id == rule_code then
- local aa = n[attribute]
+ local aa = getattr(n,attribute)
if aa then
if aa == a then
if not f then -- ?
@@ -150,7 +169,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
else
-- possible extensions: when in same class then keep spanning
local newlevel, newclass = floor(aa/1000), aa%1000
---~ strip = not continue or level == 1 -- 0
+ -- strip = not continue or level == 1 -- 0
if f then
if class == newclass then -- and newlevel > level then
head, done = flush(head,f,l,d,level,parent,false), true
@@ -161,7 +180,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
f, l, a = n, n, aa
level, class = newlevel, newclass
d = data[class]
- continue = d.continue == variables.yes
+ continue = d.continue == v_yes
end
else
if f then
@@ -169,18 +188,26 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
f, l, a = nil, nil, nil
end
- elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then
- l = n
+-- elseif f and (id == disc_code or (id == kern_code and getsubtype(n) == kerning_code)) then
+-- l = n
+ elseif id == disc_code then
+ if f then
+ l = n
+ end
+ elseif id == kern_code and getsubtype(n) == kerning_code then
+ if f then
+ l = n
+ end
elseif id == hlist_code or id == vlist_code then
if f then
head, done = flush(head,f,l,d,level,parent,strip), true
f, l, a = nil, nil, nil
end
- local list = n.list
+ local list = getlist(n)
if list then
- n.list = processwords(attribute,data,flush,list,n)
+ setfield(n,"list",(processwords(attribute,data,flush,list,n))) -- watch ()
end
- elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries
+ elseif checkdir and id == whatsit_code and getsubtype(n) == dir_code then -- only changes in dir, we assume proper boundaries
if f and a then
l = n
end
@@ -188,13 +215,12 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
if continue then
if id == penalty_code then
l = n
- elseif id == kern_code then
- l = n
+ -- elseif id == kern_code then
+ -- l = n
elseif id == glue_code then
-- catch \underbar{a} \underbar{a} (subtype test is needed)
- local subtype = n.subtype
- if continue and n[attribute] and
- (subtype == userskip_code or subtype == spaceskip_code or subskip == xspaceskip_code) then
+ local subtype = getsubtype(n)
+ if getattr(n,attribute) and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
l = n
else
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -206,7 +232,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
f, l, a = nil, nil, nil
end
end
- n = n.next
+ n = getnext(n)
end
if f then
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -217,7 +243,16 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
end
-nodes.processwords = processwords
+-- nodes.processwords = processwords
+
+nodes.processwords = function(attribute,data,flush,head,parent) -- we have hlistdir and local dir
+ head = tonut(head)
+ if parent then
+ parent = tonut(parent)
+ end
+ local head, done = processwords(attribute,data,flush,head,parent)
+ return tonode(head), done
+end
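
This wrapper shows the boundary pattern that recurs throughout these patches: convert the incoming head to a nut once, run the direct-access worker, and convert back with tonode before returning to TeX. A generic sketch of the same shape, with a placeholder worker name:

-- assumes nodes.nuts as set up earlier in these files
local tonut  = nodes.nuts.tonut
local tonode = nodes.nuts.tonode

local function process_nuts(head)         -- placeholder for a worker like processwords
    -- ... direct getfield/setfield work on the nut list ...
    return head, false
end

local function handler(head)              -- what gets registered in the task list
    head = tonut(head)
    local head, done = process_nuts(head)
    return tonode(head), done
end
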
--
@@ -236,7 +271,7 @@ end
local a_viewerlayer = attributes.private("viewerlayer")
local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
- if f.id ~= glyph_code then
+ if getid(f) ~= glyph_code then
-- saveguard ... we need to deal with rules and so (math)
return head
end
@@ -254,16 +289,16 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
if not f then
return head
end
- local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
+ local w = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),f,getnext(l))
local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
local rulethickness, unit = d.rulethickness, d.unit
local ma, ca, ta = d.ma, d.ca, d.ta
- local colorspace = ma > 0 and ma or f[a_colorspace] or 1
- local color = ca > 0 and ca or f[a_color]
- local transparency = ta > 0 and ta or f[a_transparency]
- local foreground = order == variables.foreground
+ local colorspace = ma > 0 and ma or getattr(f,a_colorspace) or 1
+ local color = ca > 0 and ca or getattr(f,a_color)
+ local transparency = ta > 0 and ta or getattr(f,a_transparency)
+ local foreground = order == v_foreground
- local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node
+ local e = dimenfactor(unit,getfont(f)) -- what if no glyph node
local rt = tonumber(rulethickness)
if rt then
@@ -271,7 +306,7 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
else
local n, u = splitdimen(rulethickness)
if n and u then -- we need to intercept ex and em and % and ...
- rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
+ rulethickness = n * dimenfactor(u,fontdata[getfont(f)]) / 2
else
rulethickness = 1/5
end
@@ -290,18 +325,18 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local ht = (offset+(i-1)*dy)*e + rulethickness - m
local dp = -(offset+(i-1)*dy)*e + rulethickness + m
local r = new_rule(w,ht,dp)
- local v = f[a_viewerlayer]
+ local v = getattr(f,a_viewerlayer)
-- quick hack
if v then
- r[a_viewerlayer] = v
+ setattr(r,a_viewerlayer,v)
end
--
if color then
- r[a_colorspace] = colorspace
- r[a_color] = color
+ setattr(r,a_colorspace,colorspace)
+ setattr(r,a_color,color)
end
if transparency then
- r[a_transparency] = transparency
+ setattr(r,a_transparency,transparency)
end
local k = new_kern(-w)
if foreground then
@@ -355,21 +390,27 @@ local function flush_shifted(head,first,last,data,level,parent,strip) -- not tha
if true then
first, last = striprange(first,last)
end
- local prev, next = first.prev, last.next
- first.prev, last.next = nil, nil
- local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next)
+ local prev = getprev(first)
+ local next = getnext(last)
+ setfield(first,"prev",nil)
+ setfield(last,"next",nil)
+ local width, height, depth = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),first,next)
local list = hpack_nodes(first,width,"exactly")
if first == head then
head = list
end
if prev then
- prev.next, list.prev = list, prev
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
end
if next then
- next.prev, list.next = list, next
+ setfield(next,"prev",list)
+ setfield(list,"next",next)
end
- local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
- list.shift, list.height, list.depth = raise, height, depth
+ local raise = data.dy * dimenfactor(data.unit,fontdata[getfont(first)])
+ setfield(list,"shift",raise)
+ setfield(list,"height",height)
+ setfield(list,"depth",depth)
if trace_shifted then
report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
end
diff --git a/Master/texmf-dist/tex/context/base/node-rul.mkiv b/Master/texmf-dist/tex/context/base/node-rul.mkiv
index 640cc54fe6a..2d2e61134ef 100644
--- a/Master/texmf-dist/tex/context/base/node-rul.mkiv
+++ b/Master/texmf-dist/tex/context/base/node-rul.mkiv
@@ -185,9 +185,14 @@
% \definebar[touchbar] [\c!method=0,\c!dy=-0.4,\c!offset=-0.0]
% \definebar[touchbars] [touchbar] [\c!continue=\v!yes]
-\definebar[\v!overstrike][\c!method=0,\c!dy=0.4,\c!offset=0.5,\c!continue=\v!yes]
-\definebar[\v!underbar] [\c!method=1,\c!dy=-0.4,\c!offset=-0.3,\c!continue=\v!yes]
+\let\normalmathoverbar \overbar
+\let\normalmathunderbar \underbar
+\let\normalmathoverstrike \overstrike
+\let\normalmathunderstrike\understrike
+
\definebar[\v!overbar] [\c!method=1,\c!dy=0.4,\c!offset=1.8,\c!continue=\v!yes]
+\definebar[\v!underbar] [\c!method=1,\c!dy=-0.4,\c!offset=-0.3,\c!continue=\v!yes]
+\definebar[\v!overstrike][\c!method=0,\c!dy=0.4,\c!offset=0.5,\c!continue=\v!yes]
\definebar
[\v!understrike]
@@ -198,16 +203,41 @@
\c!order=\v!background,
\c!color=lightgray]
-\definebar[\v!overstrikes] [\v!overstrike] [\c!continue=\v!no]
-\definebar[\v!underbars] [\v!underbar] [\c!continue=\v!no]
\definebar[\v!overbars] [\v!overbar] [\c!continue=\v!no]
+\definebar[\v!underbars] [\v!underbar] [\c!continue=\v!no]
+\definebar[\v!overstrikes] [\v!overstrike] [\c!continue=\v!no]
\definebar[\v!understrikes][\v!understrike][\c!continue=\v!no]
% we want these always so ...
-\expandafter\let\expandafter\overstrike \csname\v!overstrike \endcsname
-\expandafter\let\expandafter\underbar \csname\v!underbar \endcsname
-\expandafter\let\expandafter\overbar \csname\v!overbar \endcsname
+\ifdefined\normalmathunderbar
+ \expandafter\let\expandafter\normaltextunderbar\csname\v!underbar\endcsname
+ \unexpanded\def\underbar{\mathortext\normalmathunderbar\normaltextunderbar}
+\else
+ \expandafter\let\expandafter\underbar\csname\v!underbar\endcsname
+\fi
+
+\ifdefined\normalmathoverbar
+ \expandafter\let\expandafter\normaltextoverbar\csname\v!overbar\endcsname
+ \unexpanded\def\overbar{\mathortext\normalmathoverbar\normaltextoverbar}
+\else
+ \expandafter\let\expandafter\overbar\csname\v!overbar\endcsname
+\fi
+
+\ifdefined\normalmathunderstrike
+ \expandafter\let\expandafter\normaltextunderstrike\csname\v!understrike\endcsname
+ \unexpanded\def\understrike{\mathortext\normalmathunderstrike\normaltextunderstrike}
+\else
+ \expandafter\let\expandafter\understrike\csname\v!understrike\endcsname
+\fi
+
+\ifdefined\normalmathoverstrike
+ \expandafter\let\expandafter\normaltextoverstrike\csname\v!overstrike\endcsname
+ \unexpanded\def\overstrike{\mathortext\normalmathoverstrike \normaltextoverstrike}
+\else
+ \expandafter\let\expandafter\overstrike\csname\v!overstrike\endcsname
+\fi
+
\expandafter\let\expandafter\overstrikes\csname\v!overstrikes\endcsname
\expandafter\let\expandafter\underbars \csname\v!underbars \endcsname
\expandafter\let\expandafter\overbars \csname\v!overbars \endcsname
diff --git a/Master/texmf-dist/tex/context/base/node-ser.lua b/Master/texmf-dist/tex/context/base/node-ser.lua
index b0a6e9952aa..d7593cec748 100644
--- a/Master/texmf-dist/tex/context/base/node-ser.lua
+++ b/Master/texmf-dist/tex/context/base/node-ser.lua
@@ -9,24 +9,37 @@ if not modules then modules = { } end modules ['node-ser'] = {
-- beware, some field names will change in a future release
-- of luatex; this is pretty old code that needs an overhaul
-local type, format, rep = type, string.format, string.rep
+local type = type
local concat, tohash, sortedkeys, printtable = table.concat, table.tohash, table.sortedkeys, table.print
+local formatters, format, rep = string.formatters, string.format, string.rep
local allocate = utilities.storage.allocate
-local nodes, node = nodes, node
+local context = context
+local nodes = nodes
+local node = node
-local traverse = node.traverse
-local is_node = node.is_node
+local traverse = nodes.traverse
+local is_node = nodes.is_node
local nodecodes = nodes.nodecodes
+local subtcodes = nodes.codes
local noadcodes = nodes.noadcodes
-local nodefields = nodes.fields
+local getfields = nodes.fields
+
+local tonode = nodes.tonode
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
+----- utfchar = utf.char
+local f_char = formatters["%U"]
+----- fontchars = { } table.setmetatableindex(fontchars,function(t,k) fontchars = fonts.hashes.characters return fontchars[k] end)
+
+----- f_char = utilities.strings.chkuni -- formatters["%!chkuni!"]
+
local expand = allocate ( tohash {
+ -- text:
"list", -- list_ptr & ins_ptr & adjust_ptr
"pre", --
"post", --
@@ -42,6 +55,23 @@ local expand = allocate ( tohash {
"action", -- action_ptr
"value", -- user_defined nodes with subtype 'a' en 'n'
"head",
+ -- math:
+ "nucleus",
+ "sup",
+ "sub",
+ "list",
+ "num",
+ "denom",
+ "left",
+ "right",
+ "display",
+ "text",
+ "script",
+ "scriptscript",
+ "delim",
+ "degree",
+ "accent",
+ "bot_accent",
} )
-- page_insert: "height", "last_ins_ptr", "best_ins_ptr"
@@ -72,8 +102,9 @@ nodes.ignorablefields = ignore
-- not ok yet:
-local function astable(n,sparse) -- not yet ok
- local f, t = nodefields(n), { }
+local function astable(n,sparse) -- not yet ok, might get obsolete anyway
+ n = tonode(n)
+ local f, t = getfields(n), { }
for i=1,#f do
local v = f[i]
local d = n[v]
@@ -101,10 +132,9 @@ setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) r
-- under construction:
-local function totable(n,flat,verbose,noattributes)
- -- todo: no local function
+local function totable(n,flat,verbose,noattributes) -- nicest: n,true,true,true
local function to_table(n,flat,verbose,noattributes) -- no need to pass
- local f = nodefields(n)
+ local f = getfields(n)
local tt = { }
for k=1,#f do
local v = f[k]
@@ -118,7 +148,7 @@ local function totable(n,flat,verbose,noattributes)
if type(nv) == "number" or type(nv) == "string" then
tt[v] = nv
else
- tt[v] = totable(nv,flat,verbose)
+ tt[v] = totable(nv,flat,verbose,noattributes)
end
elseif type(nv) == "table" then
tt[v] = nv -- totable(nv,flat,verbose) -- data
@@ -128,7 +158,27 @@ local function totable(n,flat,verbose,noattributes)
end
end
if verbose then
- tt.type = nodecodes[tt.id]
+ local subtype = tt.subtype
+ local id = tt.id
+ local nodename = nodecodes[id]
+ tt.id = nodename
+ local subtypes = subtcodes[nodename]
+ if subtypes then
+ tt.subtype = subtypes[subtype]
+ elseif subtype == 0 then
+ tt.subtype = nil
+ else
+ -- we need a table
+ end
+ if tt.char then
+ tt.char = f_char(tt.char)
+ end
+ if tt.small_char then
+ tt.small_char = f_char(tt.small_char)
+ end
+ if tt.large_char then
+ tt.large_char = f_char(tt.large_char)
+ end
end
return tt
end
@@ -137,14 +187,18 @@ local function totable(n,flat,verbose,noattributes)
local t, tn = { }, 0
while n do
tn = tn + 1
- t[tn] = to_table(n,flat,verbose,noattributes)
+ local nt = to_table(n,flat,verbose,noattributes)
+ t[tn] = nt
+ nt.next = nil
+ nt.prev = nil
n = n.next
end
return t
else
- local t = to_table(n)
- if n.next then
- t.next = totable(n.next,flat,verbose,noattributes)
+ local t = to_table(n,flat,verbose,noattributes)
+ local n = n.next
+ if n then
+ t.next = totable(n,flat,verbose,noattributes)
end
return t
end
@@ -153,7 +207,8 @@ local function totable(n,flat,verbose,noattributes)
end
end
-nodes.totable = totable
+nodes.totable = function(n,...) return totable(tonode(n),...) end
+nodes.totree = function(n) return totable(tonode(n),true,true,true) end -- no attributes, todo: attributes in k,v list
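
The totree variant is the handiest for quick inspection: ids and subtypes come back as names, characters in U+ notation, and attributes are dropped. A hedged sketch of dumping a box from Lua (box 0 is only an example and must have been filled beforehand):

local head = tex.box[0] and tex.box[0].list
if head then
    table.print(nodes.totree(head),"box 0")   -- table.print is the serializer referenced above
end
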
local function key(k)
return ((type(k) == "number") and "["..k.."]") or k
@@ -161,7 +216,7 @@ end
-- not ok yet; this will become a module
--- todo: adapt to nodecodes etc
+-- todo: adapt to nodecodes etc .. use formatters
local function serialize(root,name,handle,depth,m,noattributes)
handle = handle or print
@@ -186,12 +241,12 @@ local function serialize(root,name,handle,depth,m,noattributes)
if root then
local fld
if root.id then
- fld = nodefields(root) -- we can cache these (todo)
+ fld = getfields(root) -- we can cache these (todo)
else
fld = sortedkeys(root)
end
if type(root) == 'table' and root['type'] then -- userdata or table
- handle(format("%s %s=%q,",depth,'type',root['type']))
+ handle(format("%s type=%q,",depth,root['type']))
end
for f=1,#fld do
local k = fld[f]
@@ -241,7 +296,7 @@ function nodes.serialize(root,name,noattributes)
n = n + 1
t[n] = s
end
- serialize(root,name,flush,nil,0,noattributes)
+ serialize(tonode(root),name,flush,nil,0,noattributes)
return concat(t,"\n")
end
@@ -258,6 +313,7 @@ function nodes.visualizebox(...) -- to be checked .. will move to module anyway
end
function nodes.list(head,n) -- name might change to nodes.type -- to be checked .. will move to module anyway
+ head = tonode(head)
if not n then
context.starttyping(true)
end
@@ -275,6 +331,7 @@ function nodes.list(head,n) -- name might change to nodes.type -- to be checked
end
function nodes.print(head,n)
+ head = tonode(head)
while head do
local id = head.id
logs.writer(string.formatters["%w%S"],n or 0,head)
diff --git a/Master/texmf-dist/tex/context/base/node-shp.lua b/Master/texmf-dist/tex/context/base/node-shp.lua
index 8f7a411a719..6ebfd767fb4 100644
--- a/Master/texmf-dist/tex/context/base/node-shp.lua
+++ b/Master/texmf-dist/tex/context/base/node-shp.lua
@@ -14,6 +14,7 @@ local concat, sortedpairs = table.concat, table.sortedpairs
local setmetatableindex = table.setmetatableindex
local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
local tasks = nodes.tasks
local handlers = nodes.handlers
@@ -23,28 +24,52 @@ local disc_code = nodecodes.disc
local mark_code = nodecodes.mark
local kern_code = nodecodes.kern
local glue_code = nodecodes.glue
+local whatsit_code = nodecodes.whatsit
-local texbox = tex.box
+local texgetbox = tex.getbox
local free_node = node.free
local remove_node = node.remove
local traverse_nodes = node.traverse
-local function cleanup(head) -- rough
+local removables = {
+ [whatsitcodes.open] = true,
+ [whatsitcodes.close] = true,
+ [whatsitcodes.write] = true,
+ [whatsitcodes.pdfdest] = true,
+ [whatsitcodes.pdfsavepos] = true,
+ [whatsitcodes.latelua] = true,
+}
+
+local function cleanup_redundant(head)
local start = head
while start do
local id = start.id
- if id == disc_code or (id == glue_code and not start.writable) or (id == kern_code and start.kern == 0) or id == mark_code then
- head, start, tmp = remove_node(head,start)
- free_node(tmp)
+ if id == disc_code then
+ head, start = remove_node(head,start,true)
+ -- elseif id == glue_code then
+ -- if start.writable then
+ -- start = start.next
+ -- elseif some_complex_check_on_glue_spec then
+ -- head, start = remove_node(head,start,true)
+ -- else
+ -- start = start.next
+ -- end
+ elseif id == kern_code then
+ if start.kern == 0 then
+ head, start = remove_node(head,start,true)
+ else
+ start = start.next
+ end
+ elseif id == mark_code then
+ head, start = remove_node(head,start,true)
elseif id == hlist_code or id == vlist_code then
local sl = start.list
if sl then
- start.list = cleanup(sl)
+ start.list = cleanup_redundant(sl)
start = start.next
else
- head, start, tmp = remove_node(head,start)
- free_node(tmp)
+ head, start = remove_node(head,start,true)
end
else
start = start.next
@@ -53,27 +78,56 @@ local function cleanup(head) -- rough
return head
end
-directives.register("backend.cleanup", function()
- tasks.enableaction("shipouts","nodes.handlers.cleanuppage")
-end)
+local function cleanup_flushed(head) -- rough
+ local start = head
+ while start do
+ local id = start.id
+ if id == whatsit_code and removables[start.subtype] then
+ head, start = remove_node(head,start,true)
+ elseif id == hlist_code or id == vlist_code then
+ local sl = start.list
+ if sl then
+ start.list = cleanup_flushed(sl)
+ start = start.next
+ else
+ head, start = remove_node(head,start,true)
+ end
+ else
+ start = start.next
+ end
+ end
+ return head
+end
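-- A rough comparison of the two passes above (the box register is hypothetical;
-- both prune the given box in place):
--
-- local b = texgetbox(0)
-- if b then
--     cleanup_redundant(b) -- drops discs, zero kerns, marks and empty (v)lists
--     cleanup_flushed(b)   -- drops the whatsit subtypes listed in removables (and empty lists)
-- end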
function handlers.cleanuppage(head)
-- about 10% of the nodes make no sense for the backend
- return cleanup(head), true
+ return cleanup_redundant(head), true
end
+function handlers.cleanupbox(head)
+ return cleanup_flushed(head), true
+end
+
+directives.register("backend.cleanup", function()
+ tasks.enableaction("shipouts","nodes.handlers.cleanuppage")
+end)
+
local actions = tasks.actions("shipouts") -- no extra arguments
function handlers.finalize(head) -- problem, attr loaded before node, todo ...
return actions(head)
end
+function commands.cleanupbox(n)
+ cleanup_flushed(texgetbox(n))
+end
+
-- handlers.finalize = actions
-- interface
function commands.finalizebox(n)
- actions(texbox[n])
+ actions(texgetbox(n))
end
-- just in case we want to optimize lookups:
@@ -129,8 +183,8 @@ trackers.register("nodes.frequencies",function(v)
if type(v) == "string" then
frequencies.filename = v
end
- handlers.frequencies_shipouts_before = register("shipouts", "begin")
- handlers.frequencies_shipouts_after = register("shipouts", "end")
+ handlers.frequencies_shipouts_before = register("shipouts", "begin")
+ handlers.frequencies_shipouts_after = register("shipouts", "end")
handlers.frequencies_processors_before = register("processors", "begin")
handlers.frequencies_processors_after = register("processors", "end")
tasks.prependaction("shipouts", "before", "nodes.handlers.frequencies_shipouts_before")
diff --git a/Master/texmf-dist/tex/context/base/node-tra.lua b/Master/texmf-dist/tex/context/base/node-tra.lua
index 916b2143d03..08110727705 100644
--- a/Master/texmf-dist/tex/context/base/node-tra.lua
+++ b/Master/texmf-dist/tex/context/base/node-tra.lua
@@ -18,10 +18,10 @@ local clock = os.gettimeofday or os.clock -- should go in environment
local report_nodes = logs.reporter("nodes","tracing")
-nodes = nodes or { }
-
local nodes, node, context = nodes, node, context
+local texgetattribute = tex.getattribute
+
local tracers = nodes.tracers or { }
nodes.tracers = tracers
@@ -34,9 +34,30 @@ nodes.handlers = handlers
local injections = nodes.injections or { }
nodes.injections = injections
-local traverse_nodes = node.traverse
-local traverse_by_id = node.traverse_id
-local count_nodes = nodes.count
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+
+local setattr = nuts.setattr
+
+local flush_list = nuts.flush_list
+local count_nodes = nuts.count
+local used_nodes = nuts.usedlist
+
+local traverse_by_id = nuts.traverse_id
+local traverse_nodes = nuts.traverse
+local d_tostring = nuts.tostring
+
+local nutpool = nuts.pool
+local new_rule = nutpool.rule
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -51,13 +72,11 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local rule_code = nodecodes.rule
local whatsit_code = nodecodes.whatsit
-local spec_code = nodecodes.glue_spec
+local gluespec_code = nodecodes.gluespec
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
-local nodepool = nodes.pool
-
local dimenfactors = number.dimenfactors
local formatters = string.formatters
@@ -67,15 +86,16 @@ function nodes.showlist(head, message)
if message then
report_nodes(message)
end
- for n in traverse_nodes(head) do
- report_nodes(tostring(n))
+ for n in traverse_nodes(tonut(head)) do
+ report_nodes(d_tostring(n))
end
end
function nodes.handlers.checkglyphs(head,message)
+ local h = tonut(head)
local t = { }
- for g in traverse_by_id(glyph_code,head) do
- t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
+ for g in traverse_by_id(glyph_code,h) do
+ t[#t+1] = formatters["%U:%s"](getchar(g),getsubtype(g))
end
if #t > 0 then
if message and message ~= "" then
@@ -89,14 +109,14 @@ end
function nodes.handlers.checkforleaks(sparse)
local l = { }
- local q = node.usedlist()
- for p in traverse(q) do
- local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id])
+ local q = used_nodes()
+ for p in traverse_nodes(q) do
+ local s = table.serialize(nodes.astable(p,sparse),nodecodes[getid(p)])
l[s] = (l[s] or 0) + 1
end
- node.flush_list(q)
+ flush_list(q)
for k, v in next, l do
- write_nl(formatters["%s * %s"](v,k))
+ report_nodes("%s * %s",v,k)
end
end
@@ -104,39 +124,40 @@ local f_sequence = formatters["U+%04X:%s"]
local function tosequence(start,stop,compact)
if start then
+ start = tonut(start)
+ stop = stop and tonut(stop)
local t = { }
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local c = start.char
+ local c = getchar(start)
if compact then
- if start.components then
- t[#t+1] = tosequence(start.components,nil,compact)
+ local components = getfield(start,"components")
+ if components then
+ t[#t+1] = tosequence(components,nil,compact)
else
t[#t+1] = utfchar(c)
end
else
t[#t+1] = f_sequence(c,utfchar(c))
end
- elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then
- t[#t+1] = "[" .. start.dir .. "]"
elseif id == rule_code then
if compact then
t[#t+1] = "|"
else
t[#t+1] = nodecodes[id]
end
+        elseif id == whatsit_code and (getsubtype(start) == localpar_code or getsubtype(start) == dir_code) then
+ t[#t+1] = "[" .. getfield(start,"dir") .. "]"
+ elseif compact then
+ t[#t+1] = "[]"
else
- if compact then
- t[#t+1] = "[]"
- else
- t[#t+1] = nodecodes[id]
- end
+ t[#t+1] = nodecodes[id]
end
if start == stop then
break
else
- start = start.next
+ start = getnext(start)
end
end
if compact then
@@ -152,21 +173,23 @@ end
nodes.tosequence = tosequence
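-- Typical tracing call; the box register is an assumption, and passing true as
-- the third argument gives the terse, compact rendering:
--
-- local head = tex.box[0] and tex.box[0].list
-- if head then
--     report_nodes("sequence: %s",nodes.tosequence(head,nil,true))
-- end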
function nodes.report(t,done)
- report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
+    report_nodes("output %a, changed %a, %s nodes",status.output_active,done,count_nodes(tonut(t)))
end
function nodes.packlist(head)
local t = { }
- for n in traverse(head) do
- t[#t+1] = tostring(n)
+ for n in traverse_nodes(tonut(head)) do
+ t[#t+1] = d_tostring(n)
end
return t
end
function nodes.idstostring(head,tail)
+ head = tonut(head)
+ tail = tail and tonut(tail)
local t, last_id, last_n = { }, nil, 0
for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
- local id = n.id
+ local id = getid(n)
if not last_id then
last_id, last_n = id, 1
elseif last_id == id then
@@ -194,6 +217,8 @@ function nodes.idstostring(head,tail)
end
-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
+-- head = tonut(head)
+-- tail = tonut(tail)
-- local n = head
-- while n.next do
-- n = n.next
@@ -216,7 +241,7 @@ end
-- if n == head then
-- break
-- end
--- n = n.prev
+-- n = getprev(n)
-- end
-- if not last_id then
-- t[#t+1] = "no nodes"
@@ -229,48 +254,56 @@ end
-- end
local function showsimplelist(h,depth,n)
+ h = h and tonut(h)
while h do
- write_nl(rep(" ",n) .. tostring(h))
+ report_nodes("% w%s",n,d_tostring(h))
if not depth or n < depth then
- local id = h.id
+ local id = getid(h)
if id == hlist_code or id == vlist_code then
- showsimplelist(h.list,depth,n+1)
+ showsimplelist(getlist(h),depth,n+1)
end
end
- h = h.next
+ h = getnext(h)
end
end
---~ \startluacode
---~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
---~ \stopluacode
---~ \vbox{b\footnote{n}a}
---~ \startluacode
---~ callback.register('buildpage_filter',nil)
---~ \stopluacode
+-- \startluacode
+-- callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
+-- \stopluacode
+-- \vbox{b\footnote{n}a}
+-- \startluacode
+-- callback.register('buildpage_filter',nil)
+-- \stopluacode
nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
local function listtoutf(h,joiner,textonly,last)
- local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
local w = { }
while h do
- local id = h.id
+ local id = getid(h)
if id == glyph_code then -- always true
- w[#w+1] = utfchar(h.char)
+ local c = getchar(h)
+ w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
if joiner then
w[#w+1] = joiner
end
elseif id == disc_code then
- local pre, rep, pos = h.pre, h.replace, h.post
+ local pre = getfield(h,"pre")
+ local pos = getfield(h,"post")
+ local rep = getfield(h,"replace")
w[#w+1] = formatters["[%s|%s|%s]"] (
pre and listtoutf(pre,joiner,textonly) or "",
- rep and listtoutf(rep,joiner,textonly) or "",
- mid and listtoutf(mid,joiner,textonly) or ""
+ pos and listtoutf(pos,joiner,textonly) or "",
+ rep and listtoutf(rep,joiner,textonly) or ""
)
elseif textonly then
- if id == glue_code and h.spec and h.spec.width > 0 then
- w[#w+1] = " "
+ if id == glue_code then
+ local spec = getfield(h,"spec")
+ if spec and getfield(spec,"width") > 0 then
+ w[#w+1] = " "
+ end
+ elseif id == hlist_code or id == vlist_code then
+ w[#w+1] = "[]"
end
else
w[#w+1] = "[-]"
@@ -278,24 +311,28 @@ local function listtoutf(h,joiner,textonly,last)
if h == last then
break
else
- h = h.next
+ h = getnext(h)
end
end
return concat(w)
end
-nodes.listtoutf = listtoutf
+function nodes.listtoutf(h,joiner,textonly,last)
+ local joiner = joiner == true and utfchar(0x200C) or joiner -- zwnj
+ return listtoutf(tonut(h),joiner,textonly,last and tonut(last))
+end
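-- Usage sketch: passing true as joiner inserts a zwnj between glyphs, and
-- textonly maps wide glue onto spaces; the box register is an assumption:
--
-- local head = tex.box[0] and tex.box[0].list
-- if head then
--     report_nodes("content: %s",nodes.listtoutf(head,true,true))
-- end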
local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
local function showboxes(n,symbol,depth)
- depth, symbol = depth or 0, symbol or "."
- for n in traverse_nodes(n) do
- local id = n.id
+ depth = depth or 0
+ symbol = symbol or "."
+ for n in traverse_nodes(tonut(n)) do
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- local s = n.subtype
+ local s = getsubtype(n)
report_nodes(rep(symbol,depth) .. what[s] or s)
- showboxes(n.list,symbol,depth+1)
+ showboxes(getlist(n),symbol,depth+1)
end
end
end
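-- For instance, showing the line/box structure of the page contribution list
-- (the entry point mirrors the commented buildpage_filter example above):
--
-- local contrib = tex.lists.contrib_head
-- if contrib then
--     showboxes(contrib,".",0)
-- end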
@@ -318,15 +355,8 @@ local stripper = lpeg.patterns.stripzeros
local dimenfactors = number.dimenfactors
-local function numbertodimen(d,unit,fmt,strip)
- if not d then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local t = type(d)
- if t == 'string' then
- return d
- end
+local function nodetodimen(d,unit,fmt,strip)
+ d = tonut(d) -- tricky: direct nuts are an issue
if unit == true then
unit = "pt"
fmt = "%0.5f%s"
@@ -338,27 +368,23 @@ local function numbertodimen(d,unit,fmt,strip)
fmt = "%0.5f%s"
end
end
- if t == "number" then
- local str = formatters[fmt](d*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local id = node.id
+ local id = getid(d)
if id == kern_code then
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
+ local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
return strip and lpegmatch(stripper,str) or str
end
if id == glue_code then
- d = d.spec
+ d = getfield(d,"spec")
end
- if not d or not d.id == spec_code then
+    if not d or getid(d) ~= gluespec_code then
local str = formatters[fmt](0,unit)
return strip and lpegmatch(stripper,str) or str
end
- local width = d.width
- local plus = d.stretch_order
- local minus = d.shrink_order
- local stretch = d.stretch
- local shrink = d.shrink
+ local width = getfield(d,"width")
+ local plus = getfield(d,"stretch_order")
+ local minus = getfield(d,"shrink_order")
+ local stretch = getfield(d,"stretch")
+ local shrink = getfield(d,"shrink")
if plus ~= 0 then
plus = " plus " .. stretch/65536 .. fillcodes[plus]
elseif stretch ~= 0 then
@@ -375,11 +401,39 @@ local function numbertodimen(d,unit,fmt,strip)
else
minus = ""
end
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
+ local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
end
+local function numbertodimen(d,unit,fmt,strip)
+ if not d then
+ local str = formatters[fmt](0,unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local t = type(d)
+ if t == 'string' then
+ return d
+ elseif t == "number" then
+ if unit == true then
+ unit = "pt"
+ fmt = "%0.5f%s"
+ else
+ unit = unit or 'pt'
+ if not fmt then
+ fmt = "%s%s"
+ elseif fmt == true then
+ fmt = "%0.5f%s"
+ end
+ end
+ local str = formatters[fmt](d*dimenfactors[unit],unit)
+ return strip and lpegmatch(stripper,str) or str
+ else
+ return nodetodimen(d,unit,fmt,strip) -- real node
+ end
+end
+
number.todimen = numbertodimen
+nodes .todimen = nodetodimen
function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
@@ -394,6 +448,19 @@ function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
+function nodes.topoints (n,fmt) return nodetodimen(n,"pt",fmt) end
+function nodes.toinches (n,fmt) return nodetodimen(n,"in",fmt) end
+function nodes.tocentimeters (n,fmt) return nodetodimen(n,"cm",fmt) end
+function nodes.tomillimeters (n,fmt) return nodetodimen(n,"mm",fmt) end
+function nodes.toscaledpoints(n,fmt) return nodetodimen(n,"sp",fmt) end
+function nodes.tobasepoints (n,fmt) return nodetodimen(n,"bp",fmt) end
+function nodes.topicas       (n,fmt) return nodetodimen(n,"pc",fmt) end
+function nodes.todidots (n,fmt) return nodetodimen(n,"dd",fmt) end
+function nodes.tociceros (n,fmt) return nodetodimen(n,"cc",fmt) end
+function nodes.tonewdidots (n,fmt) return nodetodimen(n,"nd",fmt) end
+function nodes.tonewciceros (n,fmt) return nodetodimen(n,"nc",fmt) end
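-- These variants take a node (a kern, or a glue via its spec) instead of a
-- number; a quick sketch, where the kern amount and the exact rendering are
-- only illustrative:
--
-- local k = nodes.pool.kern(65536)     -- a 1pt kern
-- print(nodes.topoints(k,true))        -- something like "1.00000pt"
-- print(number.topoints(65536,true))   -- the numeric variant, same result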
+
-- stop redefinition
local points = function(n)
@@ -439,8 +506,13 @@ number.basepoints = basepoints
number.pts = pts
number.nopts = nopts
-local colors = { }
-tracers.colors = colors
+nodes.points = function(n) return numbertodimen(n,"pt",true,true) end
+nodes.basepoints = function(n) return numbertodimen(n,"bp",true,true) end
+nodes.pts = function(n) return numbertodimen(n,"pt",true) end
+nodes.nopts = function(n) return format("%.5f",n*ptfactor) end
+
+local colors = { }
+tracers.colors = colors
local unsetvalue = attributes.unsetvalue
@@ -450,36 +522,34 @@ local m_color = attributes.list[a_color] or { }
function colors.set(n,c,s)
local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
+ local nn = tonut(n)
+ if mc then
+ local mm = s or texgetattribute(a_colormodel)
+        setattr(nn,a_colormodel,mm > 0 and mm or 1)
+ setattr(nn,a_color,mc)
else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
+ setattr(nn,a_color,unsetvalue)
end
return n
end
function colors.setlist(n,c,s)
- local f = n
- while n do
- local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
- else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
- end
- n = n.next
+ local nn = tonut(n)
+ local mc = m_color[c] or unsetvalue
+ local mm = s or texgetattribute(a_colormodel)
+ if mm <= 0 then
+ mm = 1
+ end
+ while nn do
+ setattr(nn,a_colormodel,mm)
+ setattr(nn,a_color,mc)
+ nn = getnext(nn)
end
- return f
+ return n
end
function colors.reset(n)
- n[a_color] = unsetvalue
+ setattr(tonut(n),a_color,unsetvalue)
return n
end
@@ -492,38 +562,108 @@ local a_transparency = attributes.private('transparency')
local m_transparency = attributes.list[a_transparency] or { }
function transparencies.set(n,t)
- local mt = m_transparency[t]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
+ setattr(tonut(n),a_transparency,m_transparency[t] or unsetvalue)
return n
end
function transparencies.setlist(n,c,s)
- local f = n
- while n do
- local mt = m_transparency[c]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
- n = n.next
+ local nn = tonut(n)
+ local mt = m_transparency[c] or unsetvalue
+ while nn do
+ setattr(nn,a_transparency,mt)
+ nn = getnext(nn)
end
- return f
+ return n
end
function transparencies.reset(n)
- n[a_transparency] = unsetvalue
+    setattr(tonut(n),a_transparency,unsetvalue)
return n
end
-- for the moment here
-nodes.visualizers = { }
+local visualizers = nodes.visualizers or { }
+nodes.visualizers = visualizers
-function nodes.visualizers.handler(head)
+function visualizers.handler(head)
return head, false
end
+
+-- we could cache attribute lists and set attr (copy will increment count) .. todo ..
+-- although tracers are used seldom
+
+local function setproperties(n,c,s)
+ local nn = tonut(n)
+ local mm = texgetattribute(a_colormodel)
+ setattr(nn,a_colormodel,mm > 0 and mm or 1)
+ setattr(nn,a_color,m_color[c])
+ setattr(nn,a_transparency,m_transparency[c])
+ return n
+end
+
+tracers.setproperties = setproperties
+
+function tracers.setlist(n,c,s)
+ local nn = tonut(n)
+ local mc = m_color[c]
+ local mt = m_transparency[c]
+ local mm = texgetattribute(a_colormodel)
+ if mm <= 0 then
+ mm = 1
+ end
+ while nn do
+ setattr(nn,a_colormodel,mm)
+ setattr(nn,a_color,mc)
+ setattr(nn,a_transparency,mt)
+ nn = getnext(nn)
+ end
+ return n
+end
+
+function tracers.resetproperties(n)
+ local nn = tonut(n)
+ setattr(nn,a_color,unsetvalue)
+ setattr(nn,a_transparency,unsetvalue)
+ return n
+end
+
+-- this one returns a nut
+
+local nodestracerpool = { }
+local nutstracerpool = { }
+
+tracers.pool = {
+ nodes = nodestracerpool,
+ nuts = nutstracerpool,
+}
+
+table.setmetatableindex(nodestracerpool,function(t,k,v)
+ local f = nutstracerpool[k]
+ local v = function(...)
+ return tonode(f(...))
+ end
+ t[k] = v
+ return v
+end)
+
+function nutstracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
+ return setproperties(new_rule(w,h,d),c,s)
+end
+
+tracers.rule = nodestracerpool.rule -- for a while
+
+-- local function show(head,n,message)
+-- print("START",message or "")
+-- local i = 0
+-- for current in traverse(head) do
+-- local prev = getprev(current)
+-- local next = getnext(current)
+-- i = i + 1
+-- print(i, prev and nodecodes[getid(prev)],nodecodes[getid(current)],next and nodecodes[getid(next)])
+-- if i == n then
+-- break
+-- end
+-- end
+-- print("STOP", message or "")
+-- end
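-- A sketch of how the tracer pool is meant to be used from a Lua call in a
-- running document; the color name is an assumption (any color that has been
-- defined, and therefore sits in attributes.list, will do):
--
-- local rule = tracers.pool.nodes.rule(65536,4*65536,2*65536,"trace:r")
-- node.write(rule) -- injects a 1pt wide rule (4pt height, 2pt depth) at the current spot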
diff --git a/Master/texmf-dist/tex/context/base/node-tsk.lua b/Master/texmf-dist/tex/context/base/node-tsk.lua
index 596ac765ae2..dfa570b2479 100644
--- a/Master/texmf-dist/tex/context/base/node-tsk.lua
+++ b/Master/texmf-dist/tex/context/base/node-tsk.lua
@@ -18,10 +18,11 @@ local report_tasks = logs.reporter("tasks")
local allocate = utilities.storage.allocate
+local context = context
local nodes = nodes
-nodes.tasks = nodes.tasks or { }
-local tasks = nodes.tasks
+local tasks = nodes.tasks or { }
+nodes.tasks = tasks
local tasksdata = { } -- no longer public
@@ -116,6 +117,14 @@ function tasks.disableaction(name,action)
end
end
+function tasks.setaction(name,action,value)
+ if value then
+ tasks.enableaction(name,action)
+ else
+ tasks.disableaction(name,action)
+ end
+end
+
function tasks.enablegroup(name,group)
local data = validgroup(name,"enable group")
if data then
@@ -317,7 +326,8 @@ end
tasks.new {
name = "processors",
- arguments = 4,
+ arguments = 5, -- often only the first is used, and the last three are only passed in hpack filter
+-- arguments = 2,
processor = nodeprocessor,
sequence = {
"before", -- for users
diff --git a/Master/texmf-dist/tex/context/base/node-tst.lua b/Master/texmf-dist/tex/context/base/node-tst.lua
index bfe0051bdfd..7f5102d5fcd 100644
--- a/Master/texmf-dist/tex/context/base/node-tst.lua
+++ b/Master/texmf-dist/tex/context/base/node-tst.lua
@@ -24,17 +24,26 @@ local rightskip_code = skipcodes.rightskip
local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
-local find_node_tail = node.tail or node.slide
+local nuts = nodes.nuts
-function nodes.leftmarginwidth(n) -- todo: three values
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getsubtype = nuts.getsubtype
+
+local find_node_tail = nuts.tail
+
+function nuts.leftmarginwidth(n) -- todo: three values
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return n.subtype == leftskip_code and n.spec.width or 0
+ return getsubtype(n) == leftskip_code and getfield(getfield(n,"spec"),"width") or 0
elseif id == whatsit_code then
- n = n.next
+ n = getnext(n)
elseif id == hlist_code then
- return n.width
+ return getfield(n,"width")
else
break
end
@@ -42,15 +51,15 @@ function nodes.leftmarginwidth(n) -- todo: three values
return 0
end
-function nodes.rightmarginwidth(n)
+function nuts.rightmarginwidth(n)
if n then
n = find_node_tail(n)
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return n.subtype == rightskip_code and n.spec.width or 0
+ return getsubtype(n) == rightskip_code and getfield(getfield(n,"spec"),"width") or 0
elseif id == whatsit_code then
- n = n.prev
+ n = getprev(n)
else
break
end
@@ -59,15 +68,15 @@ function nodes.rightmarginwidth(n)
return false
end
-function nodes.somespace(n,all)
+function nuts.somespace(n,all)
if n then
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return (all or (n.spec.width ~= 0)) and glue_code
+ return (all or (getfield(getfield(n,"spec"),"width") ~= 0)) and glue_code
elseif id == kern_code then
- return (all or (n.kern ~= 0)) and kern
+            return (all or (getfield(n,"kern") ~= 0)) and kern_code
elseif id == glyph_code then
- local category = chardata[n.char].category
+ local category = chardata[getchar(n)].category
-- maybe more category checks are needed
return (category == "zs") and glyph_code
end
@@ -75,12 +84,12 @@ function nodes.somespace(n,all)
return false
end
-function nodes.somepenalty(n,value)
+function nuts.somepenalty(n,value)
if n then
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
if value then
- return n.penalty == value
+ return getfield(n,"penalty") == value
else
return true
end
@@ -89,32 +98,38 @@ function nodes.somepenalty(n,value)
return false
end
-function nodes.is_display_math(head)
- local n = head.prev
+function nuts.is_display_math(head)
+ local n = getprev(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
elseif id == glue_code then
- if n.subtype == abovedisplayshortskip_code then
+ if getsubtype(n) == abovedisplayshortskip_code then
return true
end
else
break
end
- n = n.prev
+ n = getprev(n)
end
- n = head.next
+ n = getnext(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
elseif id == glue_code then
- if n.subtype == belowdisplayshortskip_code then
+ if getsubtype(n) == belowdisplayshortskip_code then
return true
end
else
break
end
- n = n.next
+ n = getnext(n)
end
return false
end
+
+nodes.leftmarginwidth = nodes.vianuts(nuts.leftmarginwidth)
+nodes.rightmarginwidth = nodes.vianuts(nuts.rightmarginwidth)
+nodes.somespace = nodes.vianuts(nuts.somespace)
+nodes.somepenalty = nodes.vianuts(nuts.somepenalty)
+nodes.is_display_math = nodes.vianuts(nuts.is_display_math)
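-- The node (userdata) variants now only differ by the tonut cast, so a small
-- helper on the nodes side could look like this (glue_code fetched locally;
-- the caller and its node n are assumptions):
--
-- local glue_code = nodes.nodecodes.glue
-- local function isspace(n)
--     return n and nodes.somespace(n,true) == glue_code or false
-- end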
diff --git a/Master/texmf-dist/tex/context/base/node-typ.lua b/Master/texmf-dist/tex/context/base/node-typ.lua
index 6e1a31643f2..4c33e31991d 100644
--- a/Master/texmf-dist/tex/context/base/node-typ.lua
+++ b/Master/texmf-dist/tex/context/base/node-typ.lua
@@ -6,26 +6,40 @@ if not modules then modules = { } end modules ['node-typ'] = {
license = "see context related readme files"
}
-local utfvalues = utf.values
+-- code has been moved to blob-ini.lua
-local currentfont = font.current
-local fontparameters = fonts.hashes.parameters
+local typesetters = nodes.typesetters or { }
+nodes.typesetters = typesetters
-local hpack = node.hpack
-local vpack = node.vpack
-local fast_hpack = nodes.fasthpack
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
-local nodepool = nodes.pool
+local setfield = nuts.setfield
+local getfont = nuts.getfont
-local newglyph = nodepool.glyph
-local newglue = nodepool.glue
+local hpack_node_list = nuts.hpack
+local vpack_node_list = nuts.vpack
+local fast_hpack_list = nuts.fasthpack
+local copy_node = nuts.copy
-typesetters = typesetters or { }
+local nodepool = nuts.pool
+local new_glyph = nodepool.glyph
+local new_glue = nodepool.glue
-local function tonodes(str,fontid,spacing) -- quick and dirty
+local utfvalues = utf.values
+
+local currentfont = font.current
+local fontparameters = fonts.hashes.parameters
+
+local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
local head, prev = nil, nil
if not fontid then
- fontid = currentfont()
+ if templateglyph then
+ fontid = getfont(templateglyph)
+ else
+ fontid = currentfont()
+ end
end
local fp = fontparameters[fontid]
local s, p, m
@@ -39,11 +53,15 @@ local function tonodes(str,fontid,spacing) -- quick and dirty
local next
if c == 32 then
if not spacedone then
- next = newglue(s,p,m)
+ next = new_glue(s,p,m)
spacedone = true
end
+ elseif templateglyph then
+            next = copy_node(templateglyph)
+ setfield(next,"char",c)
+ spacedone = false
else
- next = newglyph(fontid or 1,c)
+ next = new_glyph(fontid or 1,c)
spacedone = false
end
if not next then
@@ -51,29 +69,54 @@ local function tonodes(str,fontid,spacing) -- quick and dirty
elseif not head then
head = next
else
- prev.next = next
- next.prev = prev
+ setfield(prev,"next",next)
+ setfield(next,"prev",prev)
end
prev = next
end
return head
end
-typesetters.tonodes = tonodes
-
-function typesetters.hpack(str,fontid,spacing)
- return hpack(tonodes(str,fontid,spacing),"exactly")
+local function tohpack(str,fontid,spacing)
+ return hpack_node_list(tonodes(str,fontid,spacing),"exactly")
end
-function typesetters.fast_hpack(str,fontid,spacing)
- return fast_hpack(tonodes(str,fontid,spacing),"exactly")
+local function tohpackfast(str,fontid,spacing)
+ return fast_hpack_list(tonodes(str,fontid,spacing),"exactly")
end
-function typesetters.vpack(str,fontid,spacing)
+local function tovpack(str,fontid,spacing)
     -- vpack is just a hack, and a proper implementation is on the agenda
-- as it needs more info etc than currently available
- return vpack(tonodes(str,fontid,spacing))
+ return vpack_node_list(tonodes(str,fontid,spacing))
end
---~ node.write(typesetters.hpack("Hello World!"))
---~ node.write(typesetters.hpack("Hello World!",1,100*1024*10))
+local tovpackfast = tovpack
+
+local tnuts = { }
+nuts.typesetters = tnuts
+
+tnuts.tonodes = tonodes
+tnuts.tohpack = tohpack
+tnuts.tohpackfast = tohpackfast
+tnuts.tovpack = tovpack
+tnuts.tovpackfast = tovpackfast
+
+tnuts.hpack = tohpack -- obsolete
+tnuts.fast_hpack = tohpackfast -- obsolete
+tnuts.vpack = tovpack -- obsolete
+
+typesetters.tonodes = function(...) local h, b = tonodes (...) return tonode(h), b end
+typesetters.tohpack = function(...) local h, b = tohpack (...) return tonode(h), b end
+typesetters.tohpackfast = function(...) local h, b = tohpackfast(...) return tonode(h), b end
+typesetters.tovpack = function(...) local h, b = tovpack (...) return tonode(h), b end
+typesetters.tovpackfast = function(...) local h, b = tovpackfast(...) return tonode(h), b end
+
+typesetters.hpack = typesetters.tohpack -- obsolete
+typesetters.fast_hpack  = typesetters.tohpackfast -- obsolete
+typesetters.vpack = typesetters.tovpack -- obsolete
+
+-- node.write(nodes.typesetters.hpack("Hello World!"))
+-- node.write(nodes.typesetters.hpack("Hello World!",1,100*1024*10))
+
+string.tonodes = function(...) return tonode(tonodes(...)) end -- quite convenient
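-- So the commented node.write examples above can also be written with the
-- string hook (fontid and spacing stay optional):
--
-- node.write(string.tonodes("Hello World!"))
-- node.write(string.tonodes("Hello World!",1,100*1024*10))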
diff --git a/Master/texmf-dist/tex/context/base/norm-ltx.mkii b/Master/texmf-dist/tex/context/base/norm-ltx.mkii
index cd02cffec95..9a8f7ba576c 100644
--- a/Master/texmf-dist/tex/context/base/norm-ltx.mkii
+++ b/Master/texmf-dist/tex/context/base/norm-ltx.mkii
@@ -155,7 +155,7 @@
\let \normallatelua = \latelua
\let \normalluaescapestring = \luaescapestring
\let \normalluastartup = \luastartup
-\let \normalluatexdatestamp = \luatexdatestamp
+%let \normalluatexdatestamp = \luatexdatestamp
\let \normalluatexrevision = \luatexrevision
\let \normalluatexversion = \luatexversion
\let \normalnokerns = \nokerns
diff --git a/Master/texmf-dist/tex/context/base/pack-com.mkiv b/Master/texmf-dist/tex/context/base/pack-com.mkiv
index 6c1363148b9..4ca77af1c45 100644
--- a/Master/texmf-dist/tex/context/base/pack-com.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-com.mkiv
@@ -267,10 +267,13 @@
\edef\p_pack_combinations_alternative{\combinationparameter\c!alternative}%
\to \everydefinecombination
-\def\pack_combinations_pickup % we want to add struts but still ignore an empty box
+\def\pack_combinations_pickup
{\dostarttagged\t!combinationpair\empty
\dostarttagged\t!combinationcontent\empty
- \dowithnextboxcs\pack_combinations_pickup_content\hbox}
+ \assumelongusagecs\pack_combinations_pickup_content_indeed}
+
+\def\pack_combinations_pickup_content_indeed
+ {\dowithnextboxcs\pack_combinations_pickup_content\hbox}
\def\pack_combinations_pickup_content % we want to add struts but still ignore an empty box
{\dostoptagged
@@ -279,15 +282,21 @@
\expandnamespacemacro\??combinationalternative\p_pack_combinations_alternative\v!text}
\setvalue{\??combinationalternative\v!text}%
+ {\assumelongusagecs\pack_combinations_alternative_text_indeed}
+
+\setvalue{\??combinationalternative\v!label}%
+ {\assumelongusagecs\pack_combinations_alternative_label_indeed}
+
+\def\pack_combinations_alternative_text_indeed
{\dowithnextboxcs\pack_combinations_pickup_caption\vtop\bgroup
\afterassignment\pack_combinations_caption_first
\let\nexttoken=}
-\setvalue{\??combinationalternative\v!label}%
+\def\pack_combinations_alternative_label_indeed
{\dowithnextboxcs\pack_combinations_pickup_caption\vtop\bgroup
\hsize\wd\b_pack_combinations_content
\ifx\p_align\empty\else\setupalign[\p_align]\fi
- \usecombinationstyleandcolor\c!style\c!color % but label style wins, so independent configuration
+ \usecombinationstyleandcolor\c!style\c!color
\begstrut
\normalexpanded{\strc_labels_command{\v!combination\ifx\currentcombination\empty\else:\currentcombination\fi}}%
\endstrut
@@ -617,12 +626,12 @@
\unexpanded\def\placepairedbox[#1]%
{\bgroup
\edef\currentpairedbox{#1}%
- \doifnextoptionalelse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
+ \doifnextoptionalcselse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
\unexpanded\def\startplacepairedbox[#1]%
{\bgroup
\edef\currentpairedbox{#1}%
- \doifnextoptionalelse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
+ \doifnextoptionalcselse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
\unexpanded\def\stopplacepairedbox
{}
@@ -644,7 +653,7 @@
%
\global\setsystemmode{pairedbox}%
\pack_pairedboxes_before
- \dogotopar\pack_pairedboxes_first_pickup}
+ \assumelongusagecs\pack_pairedboxes_first_pickup}
\def\pack_pairedboxes_first_pickup
{\dowithnextboxcs\pack_pairedboxes_first\hbox
@@ -653,7 +662,7 @@
\def\pack_pairedboxes_first
{\pack_pairedboxes_between
- \dogotopar\pack_pairedboxes_second_pickup}
+ \assumelongusagecs\pack_pairedboxes_second_pickup}
\def\pack_pairedboxes_second_pickup
{\dowithnextboxcs\pack_pairedboxes_second\vbox
@@ -782,19 +791,32 @@
\setrigidcolumnhsize\hsize{\pairedboxparameter\c!distance}\p_n
\fi}
+% \def\pack_pairedboxes_between_horizontal
+% {\setlocalhsize
+% \hsize\wd\b_pack_pairedboxes_first % trick
+% \hsize\pairedboxparameter\c!width % can be \hsize
+% \scratchdimen\dimexpr\wd\b_pack_pairedboxes_first+\pairedboxparameter\c!distance\relax
+% \ifdim\dimexpr\hsize+\scratchdimen\relax>\pairedboxparameter\c!maxwidth\relax
+% \hsize\dimexpr\pairedboxparameter\c!maxwidth-\scratchdimen\relax
+% \fi}
+
\def\pack_pairedboxes_between_horizontal
- {\hsize\wd\b_pack_pairedboxes_first % trick
- \hsize\pairedboxparameter\c!width % can be \hsize
- \scratchdimen\dimexpr\wd\b_pack_pairedboxes_first+\pairedboxparameter\c!distance\relax
- \ifdim\dimexpr\hsize+\scratchdimen\relax>\pairedboxparameter\c!maxwidth\relax
- \hsize\dimexpr\pairedboxparameter\c!maxwidth-\scratchdimen\relax
+ {\scratchdistance\pairedboxparameter\c!distance
+ \scratchwidth\pairedboxparameter\c!maxwidth\relax
+ \setlocalhsize
+ \hsize\dimexpr\availablehsize-\wd\b_pack_pairedboxes_first-\scratchdistance\relax
+ \hsize\pairedboxparameter\c!width\relax % can be \hsize
+ \scratchdimen\dimexpr\wd\b_pack_pairedboxes_first+\scratchdistance\relax
+ \ifdim\dimexpr\hsize+\scratchdimen\relax>\scratchwidth
+ \hsize\dimexpr\scratchwidth-\scratchdimen\relax
\fi}
\def\pack_pairedboxes_between_vertical
- {\hsize\wd\b_pack_pairedboxes_first
- \hsize\pairedboxparameter\c!width % can be \hsize
- \ifdim\hsize>\pairedboxparameter\c!maxwidth\relax
- \hsize\pairedboxparameter\c!maxwidth % can be \hsize
+ {\scratchwidth\pairedboxparameter\c!maxwidth\relax
+ \hsize\wd\b_pack_pairedboxes_first
+ \hsize\pairedboxparameter\c!width\relax % can be \hsize
+ \ifdim\hsize>\scratchwidth\relax
+ \hsize\scratchwidth
\fi}
\def\pack_pairedboxes_after
@@ -811,7 +833,7 @@
\fi}
\def\pack_pairedboxes_pack_horizontal
- {\hbox\bgroup
+ {\dontleavehmode\hbox\bgroup
\forgetall
\s_pack_pairedboxes_size\ht
\ifdim\ht\b_pack_pairedboxes_first>\ht\b_pack_pairedboxes_second
@@ -832,7 +854,7 @@
\egroup}
\def\pack_pairedboxes_pack_vertical
- {\vbox\bgroup
+ {\dontleavehmode\vbox\bgroup
\forgetall
\s_pack_pairedboxes_size\wd
\ifdim\wd\b_pack_pairedboxes_first>\wd\b_pack_pairedboxes_second
diff --git a/Master/texmf-dist/tex/context/base/pack-mis.mkvi b/Master/texmf-dist/tex/context/base/pack-mis.mkvi
index 978cc120c96..38fcc18e419 100644
--- a/Master/texmf-dist/tex/context/base/pack-mis.mkvi
+++ b/Master/texmf-dist/tex/context/base/pack-mis.mkvi
@@ -46,7 +46,7 @@
\unexpanded\def\pack_placement#tag%
{\bgroup
\edef\currentplacement{#tag}%
- \doifnextoptionalelse\pack_placement_yes\pack_placement_nop}
+ \doifnextoptionalcselse\pack_placement_yes\pack_placement_nop}
\def\pack_placement_yes[#settings]%
{\setupcurrentplacement[#settings]%
diff --git a/Master/texmf-dist/tex/context/base/pack-mrl.mkiv b/Master/texmf-dist/tex/context/base/pack-mrl.mkiv
index 7c3f08825b4..3e81a4d6944 100644
--- a/Master/texmf-dist/tex/context/base/pack-mrl.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-mrl.mkiv
@@ -40,7 +40,7 @@
\unexpanded\def\blackrule
{\hbox\bgroup
- \doifnextoptionalelse\pack_black_rule_pickup\pack_black_rule_indeed}
+ \doifnextoptionalcselse\pack_black_rule_pickup\pack_black_rule_indeed}
\def\pack_black_rule_pickup[#1]%
{\setupcurrentblackrules[#1]%
@@ -96,7 +96,7 @@
\unexpanded\def\blackrules % probably never used
{\hbox\bgroup
- \doifnextoptionalelse\pack_black_rules_pickup\pack_black_rules_indeed}
+ \doifnextoptionalcselse\pack_black_rules_pickup\pack_black_rules_indeed}
\def\pack_black_rules_pickup[#1]%
{\setupcurrentblackrules[#1]%
diff --git a/Master/texmf-dist/tex/context/base/pack-obj.lua b/Master/texmf-dist/tex/context/base/pack-obj.lua
index 1e4e0f59e5b..70876a34688 100644
--- a/Master/texmf-dist/tex/context/base/pack-obj.lua
+++ b/Master/texmf-dist/tex/context/base/pack-obj.lua
@@ -13,8 +13,7 @@ reusable components.
local commands, context = commands, context
-local texcount = tex.count
-local allocate = utilities.storage.allocate
+local allocate = utilities.storage.allocate
local collected = allocate()
local tobesaved = allocate()
diff --git a/Master/texmf-dist/tex/context/base/pack-rul.lua b/Master/texmf-dist/tex/context/base/pack-rul.lua
index a990936e7d2..5796da800b0 100644
--- a/Master/texmf-dist/tex/context/base/pack-rul.lua
+++ b/Master/texmf-dist/tex/context/base/pack-rul.lua
@@ -10,16 +10,34 @@ if not modules then modules = { } end modules ['pack-rul'] = {
An explanation is given in the history document mk.
--ldx]]--
-local texsetdimen, texsetcount, texbox = tex.setdimen, tex.setcount, tex.box
-local hpack, free, copy, traverse_id = node.hpack, node.free, node.copy_list, node.traverse_id
-local texdimen, texcount = tex.dimen, tex.count
+-- we need to be careful with display math as it uses shifts
+-- challenge: adapt glue_set
+-- setfield(h,"glue_set", getfield(h,"glue_set") * getfield(h,"width")/maxwidth -- interesting ... doesn't matter much
local hlist_code = nodes.nodecodes.hlist
+local vlist_code = nodes.nodecodes.vlist
local box_code = nodes.listcodes.box
-local node_dimensions = node.dimensions
+local line_code = nodes.listcodes.line
+
+local texsetdimen = tex.setdimen
+local texsetcount = tex.setcount
+
+local nuts = nodes.nuts
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+
+local hpack = nuts.hpack
+local traverse_id = nuts.traverse_id
+local node_dimensions = nuts.dimensions
function commands.doreshapeframedbox(n)
- local box = texbox[n]
+ local box = getbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
@@ -27,21 +45,28 @@ function commands.doreshapeframedbox(n)
local minwidth = 0
local maxwidth = 0
local totalwidth = 0
- if box.width ~= 0 then
- local list = box.list
+ local averagewidth = 0
+ local boxwidth = getfield(box,"width")
+ if boxwidth ~= 0 then -- and h.subtype == vlist_code
+ local list = getlist(box)
if list then
- for h in traverse_id(hlist_code,list) do -- no dir etc needed
+ local function check(n,repack)
if not firstheight then
- firstheight = h.height
+ firstheight = getfield(n,"height")
end
- lastdepth = h.depth
+ lastdepth = getfield(n,"depth")
noflines = noflines + 1
- local l = h.list
+ local l = getlist(n)
if l then
- if h.subtype == box_code then -- maybe more
- lastlinelength = h.width
+ if repack then
+ local subtype = getsubtype(n)
+ if subtype == box_code or subtype == line_code then
+ lastlinelength = node_dimensions(l,getfield(n,"dir")) -- used to be: hpack(copy(l)).width
+ else
+ lastlinelength = getfield(n,"width")
+ end
else
- lastlinelength = node_dimensions(l) -- used to be: hpack(copy(l)).width
+ lastlinelength = getfield(n,"width")
end
if lastlinelength > maxwidth then
maxwidth = lastlinelength
@@ -52,58 +77,78 @@ function commands.doreshapeframedbox(n)
totalwidth = totalwidth + lastlinelength
end
end
- if firstheight then
- if maxwidth ~= 0 then
+ local hdone = false
+ for h in traverse_id(hlist_code,list) do -- no dir etc needed
+ check(h,true)
+ hdone = true
+ end
+ -- local vdone = false
+ for v in traverse_id(vlist_code,list) do -- no dir etc needed
+ check(v,false)
+ -- vdone = true
+ end
+ if not firstheight then
+ -- done
+ elseif maxwidth ~= 0 then
+ if hdone then
for h in traverse_id(hlist_code,list) do
- local l = h.list
+ local l = getlist(h)
if l then
- if h.subtype == box_code then
- -- explicit box, no 'line'
- else
- -- if h.width ~= maxwidth then -- else no display math handling (uses shift)
- -- challenge: adapt glue_set
- -- h.glue_set = h.glue_set * h.width/maxwidth -- interesting ... doesn't matter much
- -- h.width = maxwidth
- h.list = hpack(l,maxwidth,'exactly',h.dir)
- h.shift = 0 -- needed for display math
- h.width = maxwidth
- -- end
+ local subtype = getsubtype(h)
+ if subtype == box_code or subtype == line_code then
+ l = hpack(l,maxwidth,'exactly',getfield(h,"dir")) -- multiple return values
+ setfield(h,"list",l)
+ setfield(h,"shift",0) -- needed for display math, so no width check possible
end
+ setfield(h,"width",maxwidth)
end
end
end
- box.width = maxwidth
+ -- if vdone then
+ -- for v in traverse_id(vlist_code,list) do
+ -- local width = getfield(n,"width")
+ -- if width > maxwidth then
+ -- setfield(v,"width",maxwidth)
+ -- end
+ -- end
+ -- end
+ setfield(box,"width",maxwidth)
+ averagewidth = noflines > 0 and totalwidth/noflines or 0
end
end
end
- -- print("reshape", noflines, firstheight or 0, lastdepth or 0)
- texsetcount("global","framednoflines", noflines)
- texsetdimen("global","framedfirstheight", firstheight or 0)
- texsetdimen("global","framedlastdepth", lastdepth or 0)
- texsetdimen("global","framedminwidth", minwidth)
- texsetdimen("global","framedmaxwidth", maxwidth)
- texsetdimen("global","framedaveragewidth", noflines > 0 and totalwidth/noflines or 0)
+ texsetcount("global","framednoflines",noflines)
+ texsetdimen("global","framedfirstheight",firstheight or 0) -- also signal
+ texsetdimen("global","framedlastdepth",lastdepth or 0)
+ texsetdimen("global","framedminwidth",minwidth)
+ texsetdimen("global","framedmaxwidth",maxwidth)
+ texsetdimen("global","framedaveragewidth",averagewidth)
end
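-- The registers set above are normally read back on the TeX end; a Lua-side
-- check could look like this, assuming box 0 holds some framed content and the
-- usual pack-rul count/dimen registers exist:
--
-- commands.doreshapeframedbox(0)
-- print(tex.getcount("framednoflines"),tex.getdimen("framedmaxwidth"))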
function commands.doanalyzeframedbox(n)
- local box = texbox[n]
+ local box = getbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
- if box.width ~= 0 then
- local list = box.list
+ if getfield(box,"width") ~= 0 then
+ local list = getlist(box)
if list then
- for h in traverse_id(hlist_code,list) do
+ local function check(n)
if not firstheight then
- firstheight = h.height
+ firstheight = getfield(n,"height")
end
- lastdepth = h.depth
+ lastdepth = getfield(n,"depth")
noflines = noflines + 1
end
+ for h in traverse_id(hlist_code,list) do
+ check(h)
+ end
+ for v in traverse_id(vlist_code,list) do
+ check(v)
+ end
end
end
- -- print("analyze", noflines, firstheight or 0, lastdepth or 0)
- texsetcount("global","framednoflines", noflines)
- texsetdimen("global","framedfirstheight", firstheight or 0)
- texsetdimen("global","framedlastdepth", lastdepth or 0)
+ texsetcount("global","framednoflines",noflines)
+ texsetdimen("global","framedfirstheight",firstheight or 0)
+ texsetdimen("global","framedlastdepth",lastdepth or 0)
end
diff --git a/Master/texmf-dist/tex/context/base/pack-rul.mkiv b/Master/texmf-dist/tex/context/base/pack-rul.mkiv
index f2b332b14c8..8fcf8f548d2 100644
--- a/Master/texmf-dist/tex/context/base/pack-rul.mkiv
+++ b/Master/texmf-dist/tex/context/base/pack-rul.mkiv
@@ -100,11 +100,11 @@
\expandafter\noexpand\csname do#1rootparameter\endcsname
\expandafter\noexpand\csname setupcurrent#1\endcsname
\expandafter\noexpand\csname inherited#1framed\endcsname
- \noexpand\??framed}} % if needed we can have a variant
+ \expandafter\noexpand\csname inherited#1framedbox\endcsname}} % new
\unexpanded\def\doinstallinheritedframed#1#2#3#4#5#6#7#8#9%
{\def#5##1##2{\ifx##1\relax#6{##2}\else#4{##1}{##2}\fi}%
- \def#6##1{\ifcsname#9:##1\endcsname#9:##1\else\s!empty\fi}% root
+ \def#6##1{\ifcsname\??framed:##1\endcsname\??framed:##1\else\s!empty\fi}% root
\unexpanded\def#8%
{\bgroup
\bgroup
@@ -113,7 +113,15 @@
\let\framedparameter #2%
\let\framedparameterhash#3%
\let\setupcurrentframed #7%
- \pack_framed_process_indeed}}
+ \pack_framed_process_indeed}%
+ \unexpanded\def#9%
+ {\bgroup
+ \inframedtrue
+ \let\currentframed #1%
+ \let\framedparameter #2%
+ \let\framedparameterhash#3%
+ \let\setupcurrentframed #7%
+ \pack_framed_process_box_indeed}}
\unexpanded\def\installframedcommandhandler#1#2#3%
{\installcommandhandler{#1}{#2}{#3}%
@@ -880,11 +888,8 @@
%D The next macro uses a box and takes its natural width and height so these
%D can better be correct.
-\unexpanded\def\localbackgroundframed#1#2#3% namespace component box
- {\bgroup
- \edef\currentframed{#1}%
- \pack_framed_initialize
- \setbox\b_framed_normal\box#3%
+\unexpanded\def\pack_framed_process_box_indeed#1#2% component box (assumes parameters set and grouped usage)
+ {\setbox\b_framed_normal\box#2% could actually be \let\b_framed_normal#2
\edef\p_framed_region{\framedparameter\c!region}%
\ifx\p_framed_region\v!yes % maybe later named
\pack_framed_add_region
@@ -907,12 +912,18 @@
\else
\p_framed_backgroundoffset
\fi
- \edef\p_framed_component{#2}%
+ \edef\p_framed_component{#1}%
\pack_framed_add_background
\fi
\box\b_framed_normal
\egroup}
+\unexpanded\def\localbackgroundframed#1% namespace component box
+ {\bgroup
+ \edef\currentframed{#1}%
+ \pack_framed_initialize
+ \pack_framed_process_box_indeed} % group ends here
+
\let\postprocessframebox\relax
%D A nice example by Aditya:
@@ -1638,27 +1649,41 @@
% \framed[width=12cm,height=3cm,orientation=-180]{\input ward\relax}
% \framed[width=12cm,height=3cm,orientation=-270]{\input ward\relax}
+% \def\pack_framed_start_orientation
+% {\ifcase\p_framed_orientation
+% \let\pack_framed_stop_orientation\relax
+% \else
+% \scratchcounter\p_framed_orientation % weird .. why
+% \divide\scratchcounter\plustwo
+% \ifodd\scratchcounter
+% \let\pack_framed_stop_orientation\pack_framed_stop_orientation_odd
+% \else
+% \let\pack_framed_stop_orientation\pack_framed_stop_orientation_even
+% \fi
+% \fi}
+%
+% \def\pack_framed_stop_orientation_odd
+% {\swapmacros\framedwidth\framedheight
+% \swapmacros\localwidth\localheight
+% \swapdimens\d_framed_height\d_framed_width
+% \pack_framed_stop_orientation_even}
+%
+% \def\pack_framed_stop_orientation_even
+% {\setbox\b_framed_normal\hbox{\dorotatebox\p_framed_orientation\hbox{\box\b_framed_normal}}%
+% \d_framed_height\ht\b_framed_normal
+% \d_framed_width \wd\b_framed_normal}
+
\def\pack_framed_start_orientation
{\ifcase\p_framed_orientation
\let\pack_framed_stop_orientation\relax
\else
- \scratchcounter\p_framed_orientation % weird .. why
- \divide\scratchcounter\plustwo
- \ifodd\scratchcounter
- \let\pack_framed_stop_orientation\pack_framed_stop_orientation_odd
- \else
- \let\pack_framed_stop_orientation\pack_framed_stop_orientation_even
- \fi
+ \let\pack_framed_stop_orientation\pack_framed_stop_orientation_indeed
\fi}
-\def\pack_framed_stop_orientation_odd
- {\swapmacros\framedwidth\framedheight
- \swapmacros\localwidth\localheight
- \swapdimens\d_framed_height\d_framed_width
- \pack_framed_stop_orientation_even}
-
-\def\pack_framed_stop_orientation_even
- {\setbox\b_framed_normal\hbox{\dorotatebox\p_framed_orientation\hbox{\box\b_framed_normal}}}
+\def\pack_framed_stop_orientation_indeed
+ {\setbox\b_framed_normal\hbox{\dorotatebox\p_framed_orientation\hbox{\box\b_framed_normal}}%
+ \d_framed_height\ht\b_framed_normal
+ \d_framed_width \wd\b_framed_normal}
%D The last conditional takes care of the special situation of in||line \inframed
%D [height=3cm] {framed} boxes. Such boxes have to be \inframed {aligned} with the
@@ -1738,17 +1763,45 @@
\edef\currentmathframed{#1}%
\dosingleempty\pack_framed_mathframed_indeed}
+% \def\pack_framed_mathframed_indeed[#1]#2% no fancy nesting supported here
+% {\iffirstargument
+% \setupcurrentmathframed[#1]%
+% \fi
+% \c_framed_mstyle\mathstyle
+% \doifnot{\mathframedparameter\c!location}\v!low{\let\normalstrut\pack_framed_math_strut}%
+% \inheritedmathframedframed{\Ustartmath\triggermathstyle\c_framed_mstyle#2\Ustopmath}%
+% \endgroup}
+
+\newcount\c_pack_framed_mc
+
+\def\pack_framed_math_pos
+ {\global\advance\c_pack_framed_mc\plusone
+ \xdef\pack_framed_mc_one{mcf:1:\number\c_pack_framed_mc}%
+ \xdef\pack_framed_mc_two{mcf:2:\number\c_pack_framed_mc}%
+ \xypos\pack_framed_mc_two}
+
\def\pack_framed_mathframed_indeed[#1]#2% no fancy nesting supported here
{\iffirstargument
\setupcurrentmathframed[#1]%
\fi
\c_framed_mstyle\mathstyle
- \doifnot{\mathframedparameter\c!location}\v!low{\let\normalstrut\pack_framed_math_strut}%
+ \edef\m_framed_location{\mathframedparameter\c!location}%
+ \ifx\m_framed_location\v!mathematics
+ \let\normalstrut\pack_framed_math_pos
+ \else\ifx\m_framed_location\v!low\else
+ \let\normalstrut\pack_framed_math_strut
+ \fi\fi
\inheritedmathframedframed{\Ustartmath\triggermathstyle\c_framed_mstyle#2\Ustopmath}%
\endgroup}
+\installframedlocator \v!mathematics
+ {}
+ {\lower\dimexpr\MPy\pack_framed_mc_two-\MPy\pack_framed_mc_one\relax
+ \hbox{\xypos\pack_framed_mc_one\box\b_framed_normal}}
+
\definemathframed[mframed]
\definemathframed[inmframed][\c!location=\v!low]
+\definemathframed[mcframed] [\c!location=\v!mathematics]
%D So instead of the rather versatile \type {\framed}, we use \type {\mframed}:
%D
@@ -1927,6 +1980,19 @@
%D The handlers:
+% Beware, we have a \noindent so an empty line is indeed an empty line and
+% the \synchronizeinlinedirection triggers a vbox instead of a line.
+%
+% \startTEXpage[offset=0.5ex,align={lohi,middle}]
+%
+% \vbox{\hbox{x}}
+% \stopTEXpage
+%
+% \startTEXpage[offset=0.5ex,align={lohi,middle}]
+% \vbox{\hbox{x}}
+% \stopTEXpage
+
+
% \def\pack_framed_forgetall{\forgetall}
\def\pack_framed_set_foregroundcolor
@@ -1953,6 +2019,7 @@
\raggedcommand
\pack_framed_do_top
\bgroup
+\synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\pack_framed_do_bottom
@@ -1974,6 +2041,7 @@
\raggedcenter
\vss
\bgroup
+\synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\vss
@@ -1996,6 +2064,7 @@
\aftergroup\localendstrut
\aftergroup\vss
\aftergroup\egroup
+\synchronizeinlinedirection
\localbegstrut
\doformatonelinerbox}
@@ -2013,6 +2082,7 @@
\raggedcommand
\pack_framed_do_top
\bgroup
+\synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\pack_framed_do_bottom
@@ -2036,6 +2106,7 @@
\hbox
\bgroup
\aftergroup\egroup
+\synchronizeinlinedirection
\localstrut
\doformatonelinerbox}
@@ -2049,6 +2120,7 @@
\fi
\pack_framed_do_setups
\hss
+\synchronizeinlinedirection
\localstrut
\bgroup
\aftergroup\hss
@@ -2063,6 +2135,7 @@
\fi
\let\postprocessframebox\relax
\pack_framed_do_setups
+\synchronizeinlinedirection
\localstrut
\doformatonelinerbox}
@@ -2491,10 +2564,25 @@
\inheritedframedtextframed\bgroup
\let\\=\endgraf
\framedtextparameter\c!inner % oud spul
- \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_start_depth_correction
+ \edef\p_framed_text_depthcorrection{\framedtextparameter\c!depthcorrection}%
+ \ifx\p_framed_text_depthcorrection\v!on
+ \pack_framed_text_start_depth_correction
+ \else
+ \bgroup
+ \fi
+\vskip-\strutdp % brrr why is this needed ... needs to be sorted out, see testcase 1
\doinhibitblank
\setupindenting[\framedtextparameter\c!indenting]%
- \useframedtextstyleandcolor\c!style\c!color}
+ \useframedtextstyleandcolor\c!style\c!color
+ \ignorespaces}
+
+% testcase 1:
+%
+% \showstruts
+% \startframedtext[align={normal,tolerant},offset=0pt] \input tufte \stopframedtext
+% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \input tufte \stopframedtext
+% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \inframed{x} \stopframedtext
+% \framed[align={normal,tolerant},offset=0pt]{\input tufte }
%D The \type {none} option is handy for nested usage, as in the presentation
%D styles, where we don't want interference.
@@ -2504,7 +2592,11 @@
\unexpanded\def\pack_framed_text_stop % no \baselinecorrection, see faq docs
{\endgraf
\removelastskip
- \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_stop_depth_correction
+ \ifx\p_framed_text_depthcorrection\v!on
+ \pack_framed_text_stop_depth_correction
+ \else
+ \egroup
+ \fi
\stopboxedcontent
\ifconditional\c_framed_text_location_none
\egroup
diff --git a/Master/texmf-dist/tex/context/base/page-app.mkiv b/Master/texmf-dist/tex/context/base/page-app.mkiv
index 5f1c2f297b6..e4858d48ffb 100644
--- a/Master/texmf-dist/tex/context/base/page-app.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-app.mkiv
@@ -106,7 +106,7 @@
\fi
\d_page_fitting_width \wd\b_page_fitting
\d_page_fitting_height\ht\b_page_fitting
- \startlocallayout % still valid?
+ \startlocallayout % hm, we change the papersize so we still need it
\let\checkcurrentlayout\relax % else interference with odd/even layout
\processaction
[\fittingpageparameter\c!paper]
@@ -117,7 +117,7 @@
\startmakeup[fittingpage]%
\box\b_page_fitting
\stopmakeup
- \stoplocallayout % still valid?
+ \stoplocallayout % we need to get rid of this and use the built-in
\egroup
\autostoptext}
diff --git a/Master/texmf-dist/tex/context/base/page-brk.mkiv b/Master/texmf-dist/tex/context/base/page-brk.mkiv
index ac1fffd6b1c..11dc04bfda5 100644
--- a/Master/texmf-dist/tex/context/base/page-brk.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-brk.mkiv
@@ -228,7 +228,7 @@
\installpagebreakmethod \v!quadruple % not yet ok inside columnsets
{\ifdoublesided
- \ifnum\numexpr\realpageno/4\relax=\numexpr\realpageno/2\relax\else
+ \ifnum\numexpr\realpageno/\plusfour\relax=\numexpr\realpageno/\plustwo\relax\else
\page_breaks_handle_direct\v!yes
\page_breaks_handle_direct\v!empty
\page_breaks_handle_direct\v!empty
@@ -316,96 +316,215 @@
%D Test page breaks.
-\newdimen \d_page_tests_test
-\newconstant\c_page_tests_mode
+% \newdimen \d_page_tests_test
+% \newconstant\c_page_tests_mode
-\newconstant\testpagemethod % todo: \testnewpage[method=,lines=,voffset=]
-\newconstant\testpagetrigger
+\newconstant\testpagemethod % old
+\newconstant\testpagetrigger % old
-\unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
-\unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
-\unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
-
-\def\page_tests_test[#1][#2]% don't change, only add more methods
+% \unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
+% \unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
+% \unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
+%
+% \def\page_tests_test[#1][#2]% don't change, only add more methods
+% {\relax % needed before \if
+% \ifconditional\c_page_breaks_enabled
+% % new from here
+% \ifcase\testpagetrigger
+% \endgraf
+% \or\ifvmode
+% \dosomebreak\allowbreak
+% \else % indeed?
+% \vadjust{\allowbreak}%
+% \endgraf
+% \fi\fi
+% % till here
+% \ifdim\pagegoal<\maxdimen \relax
+% \ifdim\pagetotal<\pagegoal \relax
+% \d_page_tests_test\dimexpr
+% #1\lineheight
+% +\pagetotal
+% \ifdim\lastskip<\parskip+\parskip\fi
+% \ifsecondargument+#2\fi
+% \relax
+% \ifcase\testpagemethod
+% \ifdim\d_page_tests_test>.99\pagegoal
+% \penalty-\plustenthousand
+% \fi
+% \or
+% \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
+% \penalty-\plustenthousand
+% \fi
+% \or
+% \getnoflines\pagegoal
+% \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
+% \penalty-\plustenthousand
+% \fi
+% \or % same as 0 but more accurate
+% \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
+% \penalty-\plustenthousand
+% \fi
+% \fi
+% \else\ifnum\c_page_tests_mode=\plusthree
+% \page_tests_flush_so_far
+% \fi\fi
+% \else\ifnum\c_page_tests_mode=\plusone
+% \goodbreak
+% \fi\fi
+% \else
+% \endgraf
+% \fi}
+%
+% \def\page_tests_flush_so_far
+% {\endgraf
+% \ifdim\pagetotal>\pagegoal
+% \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+% \goodbreak
+% \else
+% \page
+% \fi
+% \fi}
+
+\installcorenamespace {pagechecker}
+\installcorenamespace {pagecheckermethod}
+
+\installcommandhandler \??pagechecker {pagechecker} \??pagechecker
+
+\setuppagechecker
+ [\c!method=1,
+ \c!before=,
+ \c!after=,
+ \c!inbetween=,
+ \c!lines=\plusthree,
+ \c!offset=\zeropoint]
+
+\def\page_check_amount
+ {\dimexpr
+ \pagecheckerparameter\c!lines\lineheight
+ +\pagetotal
+ \ifdim\lastskip<\parskip+\parskip\fi
+ +\pagecheckerparameter\c!offset
+ \relax}
+
+\unexpanded\def\checkpage
+ {\dodoubleempty\page_check}
+
+\def\page_check[#1][#2]%
{\relax % needed before \if
+ \endgraf
\ifconditional\c_page_breaks_enabled
- % new from here
- \ifcase\testpagetrigger
- \endgraf
- \or\ifvmode
- \dosomebreak\allowbreak
- \else % indeed?
- \vadjust{\allowbreak}%
- \endgraf
- \fi\fi
- % till here
- \ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \d_page_tests_test\dimexpr
- #1\lineheight
- +\pagetotal
- \ifdim\lastskip<\parskip+\parskip\fi
- \ifsecondargument+#2\fi
- \relax
- \ifcase\testpagemethod
- \ifdim\d_page_tests_test>.99\pagegoal
- \penalty-\plustenthousand
- \fi
- \or
- \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
- \penalty-\plustenthousand
- \fi
- \or
- \getnoflines\pagegoal
- \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
- \penalty-\plustenthousand
- \fi
- \or % same as 0 but more accurate
- \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
- \penalty-\plustenthousand
- \fi
- \fi
- \else\ifnum\c_page_tests_mode=\plusthree
- \page_tests_flush_so_far
- \fi\fi
- \else\ifnum\c_page_tests_mode=\plusone
- \goodbreak
- \fi\fi
+ \begingroup
+ \edef\currentpagechecker{#1}%
+ \ifsecondargument\setupcurrentpagechecker[#2]\fi
+ \csname\??pagecheckermethod\pagecheckerparameter\c!method\endcsname
+ \endgroup
+ \fi}
+
+\setvalue{\??pagecheckermethod 0}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\page_check_amount>.99\pagegoal
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
\else
- \endgraf
+ \pagecheckerparameter\c!inbetween
\fi}
-\def\page_tests_flush_so_far
- {\endgraf
- \ifdim\pagetotal>\pagegoal
- \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
- \goodbreak
+\setvalue{\??pagecheckermethod 1}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\dimexpr\page_check_amount-\pagegoal\relax>-\lineheight
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
\else
- \page
+ \pagecheckerparameter\c!inbetween
\fi
+ \else
+ \goodbreak
+ \pagecheckerparameter\c!inbetween
\fi}
+\setvalue{\??pagecheckermethod 2}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \getnoflines\pagegoal
+ \ifdim\dimexpr\page_check_amount-\noflines\lineheight\relax>-\lineheight
+          \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi}
+
+\setvalue{\??pagecheckermethod 3}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\dimexpr\page_check_amount-10\scaledpoint\relax>\pagegoal
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \ifdim\pagetotal>\pagegoal
+ \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+ \goodbreak
+ \pagecheckerparameter\c!inbetween
+ \else
+ \pagecheckerparameter\c!before
+ \page
+ \pagecheckerparameter\c!after
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi}
+
+\definepagechecker[\s!unknown:0] [\c!method=0,\c!before=,\c!after=,\c!inbetween=]
+\definepagechecker[\s!unknown:1][\s!unknown:0][\c!method=1]
+\definepagechecker[\s!unknown:2][\s!unknown:0][\c!method=2]
+\definepagechecker[\s!unknown:3][\s!unknown:0][\c!method=3]
+
+\def\page_tests_test_a[#1][#2]{\normalexpanded{\checkpage[\s!unknown:1][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+\def\page_tests_test_b[#1][#2]{\normalexpanded{\checkpage[\s!unknown:2][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+\def\page_tests_test_c[#1][#2]{\normalexpanded{\checkpage[\s!unknown:3][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+
+\unexpanded\def\testpage {\dodoubleempty\page_tests_test_a} %
+\unexpanded\def\testpageonly{\dodoubleempty\page_tests_test_b} % no penalties added to the mvl
+\unexpanded\def\testpagesync{\dodoubleempty\page_tests_test_c} % force sync
+
%D Test column breaks.
-\def\testcolumn
+\unexpanded\def\testcolumn
{\dodoubleempty\page_tests_columns_test}
\def\page_tests_columns_test[#1][#2]%
- {\endgraf
- \ifdim\pagegoal<\maxdimen
- \ifdim\pagetotal<\pagegoal
- \d_page_tests_test\dimexpr
- \pagegoal
- -\pagetotal
- \ifdim\lastskip<\parskip+\parskip\fi
- \ifsecondargument+#2\fi
- \relax
- \getrawnoflines\d_page_tests_test % (raw)
- \ifnum#1>\noflines
- \column
- \fi
+ {\ifdefined\page_otr_command_test_column
+ \ifsecondargument
+ \page_otr_command_test_column[#1][#2]%
\else
- \penalty-\plustenthousand % (untested)
+ \page_otr_command_test_column[#1][\zeropoint]%
\fi
\fi}
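
A minimal usage sketch of the new page checker interface introduced in this hunk (not part of the patch; the checker name "mychecker" and the values are made up). The old \testpage trio is now just a thin wrapper around \checkpage, so the command-handler form can be driven directly; roughly, a break is forced when the requested number of lines would no longer fit on the current page:

    \definepagechecker
      [mychecker]
      [method=2,
       lines=4,
       before=,
       after=]

    % somewhere in the running text:
    \checkpage[mychecker]

    % or with inline overrides:
    \checkpage[mychecker][lines=6,offset=\lineheight]
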
diff --git a/Master/texmf-dist/tex/context/base/page-flt.lua b/Master/texmf-dist/tex/context/base/page-flt.lua
index ab7a534eb15..7b1afc55cf3 100644
--- a/Master/texmf-dist/tex/context/base/page-flt.lua
+++ b/Master/texmf-dist/tex/context/base/page-flt.lua
@@ -11,9 +11,6 @@ if not modules then modules = { } end modules ['page-flt'] = {
local insert, remove = table.insert, table.remove
local find = string.find
-local setdimen, setcount, texbox = tex.setdimen, tex.setcount, tex.box
-
-local copy_node_list = node.copy_list
local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change
@@ -24,10 +21,23 @@ local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match
-- we use floatbox, floatwidth, floatheight
-- text page leftpage rightpage (todo: top, bottom, margin, order)
-floats = floats or { }
-local floats = floats
+local copy_node_list = node.copy_list
+local flush_node_list = node.flush_list
+local copy_node = node.copy
+
+local setdimen = tex.setdimen
+local setcount = tex.setcount
+local texgetbox = tex.getbox
+local texsetbox = tex.setbox
+local textakebox = nodes.takebox
-local noffloats, last, default, pushed = 0, nil, "text", { }
+floats = floats or { }
+local floats = floats
+
+local noffloats = 0
+local last = nil
+local default = "text"
+local pushed = { }
local function initialize()
return {
@@ -98,21 +108,20 @@ end
function floats.save(which,data)
which = which or default
- local b = texbox.floatbox
+ local b = textakebox("floatbox")
if b then
local stack = stacks[which]
noffloats = noffloats + 1
- local w, h, d = b.width, b.height, b.depth
local t = {
n = noffloats,
data = data or { },
- box = copy_node_list(b),
+ box = b,
}
- texbox.floatbox = nil
insert(stack,t)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",
+ which,noffloats,#stack,b.width,b.height,b.depth)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
@@ -125,14 +134,13 @@ function floats.resave(which)
if last then
which = which or default
local stack = stacks[which]
- local b = texbox.floatbox
- local w, h, d = b.width, b.height, b.depth
- last.box = copy_node_list(b)
- texbox.floatbox = nil
+ local b = textakebox("floatbox")
+ last.box = b
insert(stack,1,last)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",
+ which,noffloats,#stack,b.width,b.height,b.depth)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
@@ -146,13 +154,14 @@ function floats.flush(which,n,bylabel)
local stack = stacks[which]
local t, b, n = get(stack,n or 1,bylabel)
if t then
- local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d)
+ local w, h, d = setdimensions(b) -- ?
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",
+ which,t.n,n,w,h,d)
else
interfaces.showmessage("floatblocks",3,t.n)
end
- texbox.floatbox = b
+ texsetbox("floatbox",b)
last = remove(stack,n)
last.box = nil
setcount("global","savednoffloats",#stacks[default]) -- default?
@@ -166,9 +175,10 @@ function floats.consult(which,n)
local stack = stacks[which]
local t, b, n = get(stack,n)
if t then
- local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d)
+ local w, h, d = setdimensions(b)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",
+ which,t.n,n,w,h,d)
end
return t, b, n
else
@@ -263,16 +273,16 @@ end
-- interface
-local context = context
-local setvalue = context.setvalue
+local context = context
+local context_setvalue = context.setvalue
-commands.flushfloat = floats.flush
-commands.savefloat = floats.save
-commands.resavefloat = floats.resave
-commands.pushfloat = floats.push
-commands.popfloat = floats.pop
-commands.consultfloat = floats.consult
-commands.collectfloat = floats.collect
+commands.flushfloat = floats.flush
+commands.savefloat = floats.save
+commands.resavefloat = floats.resave
+commands.pushfloat = floats.push
+commands.popfloat = floats.pop
+commands.consultfloat = floats.consult
+commands.collectfloat = floats.collect
function commands.getfloatvariable (...) local v = floats.getvariable(...) if v then context(v) end end
function commands.checkedpagefloat (...) local v = floats.checkedpagefloat(...) if v then context(v) end end
@@ -282,8 +292,8 @@ function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(..
function commands.analysefloatmethod(str) -- currently only one method
local method, label, row, column = floats.analysemethod(str)
- setvalue("floatmethod",method or "")
- setvalue("floatlabel", label or "")
- setvalue("floatrow", row or "")
- setvalue("floatcolumn",column or "")
+ context_setvalue("floatmethod",method or "")
+ context_setvalue("floatlabel", label or "")
+ context_setvalue("floatrow", row or "")
+ context_setvalue("floatcolumn",column or "")
end
diff --git a/Master/texmf-dist/tex/context/base/page-imp.mkiv b/Master/texmf-dist/tex/context/base/page-imp.mkiv
index c22e9e646c9..230ede570ed 100644
--- a/Master/texmf-dist/tex/context/base/page-imp.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-imp.mkiv
@@ -41,7 +41,7 @@
\prependtoks
\page_shipouts_flush_text_data
\to \everylastshipout
-
+
% Problem: we need to apply the finalizers to a to be shipped out page (as
% we can have positioning involved). However, we can also add stuff in the
% imposition, like cropmarks. Fortunately we do that with metapost so
@@ -293,7 +293,7 @@
\fi
\fi}
-\def\installpagearrangement #1 % will lchange, no space
+\def\installpagearrangement #1 % will change, no space
{\setgvalue{\??layoutarranger#1}}
\def\checkinstalledpagearrangement#1% can be empty: aaa,,bbb
diff --git a/Master/texmf-dist/tex/context/base/page-inj.lua b/Master/texmf-dist/tex/context/base/page-inj.lua
index 5b450d60ece..56e5a234e8b 100644
--- a/Master/texmf-dist/tex/context/base/page-inj.lua
+++ b/Master/texmf-dist/tex/context/base/page-inj.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ["page-inj"] = {
-- Adapted a bit by HH: numbered states, tracking, delayed, order, etc.
+local type, tonumber = type, tonumber
+
local injections = pagebuilders.injections or { }
pagebuilders.injections = injections
@@ -16,6 +18,11 @@ local trace = false trackers.register("pagebuilder.injections",func
local variables = interfaces.variables
+local context = context
+local commands = commands
+
+local texsetcount = tex.setcount
+
local v_yes = variables.yes
local v_previous = variables.previous
local v_next = variables.next
@@ -31,7 +38,7 @@ function injections.save(specification) -- maybe not public, just commands.*
state = tonumber(specification.state) or specification.state,
parameters = specification.userdata,
}
- tex.setcount("global","c_page_boxes_flush_n",#cache)
+ texsetcount("global","c_page_boxes_flush_n",#cache)
end
function injections.flushbefore() -- maybe not public, just commands.*
@@ -62,7 +69,7 @@ function injections.flushbefore() -- maybe not public, just commands.*
end
context.unprotect()
cache = delayed
- tex.setcount("global","c_page_boxes_flush_n",#cache)
+ texsetcount("global","c_page_boxes_flush_n",#cache)
end
end
@@ -92,7 +99,7 @@ function injections.flushafter() -- maybe not public, just commands.*
end
context.protect()
cache = delayed
- tex.setcount("global","c_page_boxes_flush_n",#cache)
+ texsetcount("global","c_page_boxes_flush_n",#cache)
end
end
diff --git a/Master/texmf-dist/tex/context/base/page-lay.mkiv b/Master/texmf-dist/tex/context/base/page-lay.mkiv
index c0d897522e8..19f23724247 100644
--- a/Master/texmf-dist/tex/context/base/page-lay.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-lay.mkiv
@@ -597,7 +597,7 @@
\let\p_page_layouts_height\empty
\def\page_layouts_synchronize
- {\setups[\layoutparameter\c!preset]%
+ {\setups[\layoutparameter\c!preset]\relax
\global\leftmarginwidth \layoutparameter\c!leftmargin
\global\rightmarginwidth\layoutparameter\c!rightmargin
\global\leftedgewidth \layoutparameter\c!leftedge
@@ -987,14 +987,13 @@
\global\let\page_adepts_pop\page_adepts_pop_indeed}
\def\page_adapts_check
+ {\csname\??pageadaptations\the\ifcsname\??pageadaptations\the\realpageno\endcsname\realpageno\else\zerocount\fi\endcsname}
+
+\def\page_adapts_reset
{\ifcsname\??pageadaptations\the\realpageno\endcsname
- \page_adapts_check_indeed
+ \global\undefinevalue{\??pageadaptations\the\realpageno}%
\fi}
-\def\page_adapts_check_indeed
- {\getvalue{\??pageadaptations\the\realpageno}%
- \letvalue{\??pageadaptations\the\realpageno}\relax}
-
\def\page_adepts_push_indeed
{\global\d_page_adepts_pushed_text_height \textheight
\global\d_page_adepts_pushed_footer_height\footerheight}
@@ -1006,6 +1005,9 @@
\global\let\page_adepts_push\page_adepts_push_indeed
\global\let\page_adepts_pop\relax}
+\appendtoks \page_adapts_check \to \everystarttext
+\appendtoks \page_adapts_reset \to \everyshipout
+
\let\page_adepts_pop \relax
\let\page_adepts_push\page_adepts_push_indeed
@@ -1024,12 +1026,12 @@
\unexpanded\def\startlayout[#1]%
{\page
- \pushmacro\currentlayout
+ \globalpushmacro\currentlayout
\doiflayoutdefinedelse{#1}{\setuplayout[#1]}\donothing} % {\setuplayout[\currentlayout]}}
\unexpanded\def\stoplayout
{\page
- \popmacro\currentlayout
+ \globalpopmacro\currentlayout
\setuplayout[\currentlayout]}
% NOTE: CHECK AGAIN WHEN THIS SHOULD BE GLOBAL AND WHEN NOT
@@ -1273,7 +1275,7 @@
{\globalpopmacro\currentlayout
\globalpopmacro\page_paper_restore
\page_paper_restore
- \setuplayout\relax}
+ \setuplayout[\currentlayout]\relax} % explicit !
%D \macros
%D {showprint, showframe, showlayout, showsetups}
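
Because \startlayout now pushes and pops \currentlayout globally, a layout switch is undone cleanly even though pages are flushed inside the group. A hedged usage sketch (not part of the patch; the layout name "special" and its settings are made up):

    \definelayout[special][backspace=3cm,width=middle]

    \startlayout[special] % flushes the current page and switches layout
      ... pages typeset with the special layout ...
    \stoplayout           % flushes again and restores the previous layout
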
diff --git a/Master/texmf-dist/tex/context/base/page-lin.lua b/Master/texmf-dist/tex/context/base/page-lin.lua
index e6b500e8bc8..0b241240c02 100644
--- a/Master/texmf-dist/tex/context/base/page-lin.lua
+++ b/Master/texmf-dist/tex/context/base/page-lin.lua
@@ -8,37 +8,47 @@ if not modules then modules = { } end modules ['page-lin'] = {
-- experimental -> will become builders
-local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-
-local report_lines = logs.reporter("lines")
+-- if there is demand for it, we can support multiple numbering streams
+-- and use more than one attribute
-local texbox = tex.box
+local next, tonumber = next, tonumber
-local attributes, nodes, node, context = attributes, nodes, node, context
+local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-nodes.lines = nodes.lines or { }
-local lines = nodes.lines
+local report_lines = logs.reporter("lines")
-lines.data = lines.data or { } -- start step tag
-local data = lines.data
-local last = #data
+local attributes = attributes
+local nodes = nodes
+local context = context
-lines.scratchbox = lines.scratchbox or 0
+nodes.lines = nodes.lines or { }
+local lines = nodes.lines
-local leftmarginwidth = nodes.leftmarginwidth
+lines.data = lines.data or { } -- start step tag
+local data = lines.data
+local last = #data
-storage.register("lines/data", lines.data, "nodes.lines.data")
+lines.scratchbox = lines.scratchbox or 0
--- if there is demand for it, we can support multiple numbering streams
--- and use more than one attibute
+storage.register("lines/data", data, "nodes.lines.data")
local variables = interfaces.variables
+local v_next = variables.next
+local v_page = variables.page
+local v_no = variables.no
+
local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local whatcodes = nodes.whatcodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local whatsit_code = nodecodes.whatsit
+local glue_code = nodecodes.glue
+local glyph_code = nodecodes.glyph
+local leftskip_code = skipcodes.leftskip
+local textdir_code = whatcodes.dir
local a_displaymath = attributes.private('displaymath')
local a_linenumber = attributes.private('linenumber')
@@ -49,12 +59,32 @@ local current_list = { }
local cross_references = { }
local chunksize = 250 -- not used in boxed
-local traverse_id = node.traverse_id
-local traverse = node.traverse
-local copy_node = node.copy
-local hpack_node = node.hpack
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
+local nuts = nodes.nuts
+
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getnext = nuts.getnext
+local getattr = nuts.getattr
+local getlist = nuts.getlist
+local getbox = nuts.getbox
+local getfield = nuts.getfield
+
+local setfield = nuts.setfield
+
+local traverse_id = nuts.traverse_id
+local traverse = nuts.traverse
+local copy_node = nuts.copy
+local hpack_node = nuts.hpack
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local is_display_math = nuts.is_display_math
+local leftmarginwidth = nuts.leftmarginwidth
+
+local negated_glue = nuts.pool.negatedglue
+local new_hlist = nuts.pool.hlist
+
+local ctx_convertnumber = context.convertnumber
+local ctx_makelinenumber = context.makelinenumber
-- cross referencing
@@ -67,16 +97,16 @@ end
local function resolve(n,m) -- we can now check the 'line' flag (todo)
while n do
- local id = n.id
+ local id = getid(n)
if id == whatsit_code then -- why whatsit
- local a = n[a_linereference]
+ local a = getattr(n,a_linereference)
if a then
cross_references[a] = m
end
elseif id == hlist_code or id == vlist_code then
- resolve(n.list,m)
+ resolve(getlist(n),m)
end
- n = n.next
+ n = getnext(n)
end
end
@@ -105,7 +135,7 @@ filters.line = filters.line or { }
function filters.line.default(data)
-- helpers.title(data.entries.linenumber or "?",data.metadata)
- context.convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
+ ctx_convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
end
function filters.line.page(data,prefixspec,pagespec) -- redundant
@@ -165,20 +195,20 @@ local function check_number(n,a,skip,sameline)
if sameline then
skipflag = 0
if trace_numbers then
- report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
elseif not skip and s % d.step == 0 then
skipflag, d.start = 1, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
else
skipflag, d.start = 0, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
end
- context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
+ ctx_makelinenumber(tag,skipflag,s,getfield(n,"shift"),getfield(n,"width"),leftmarginwidth(getlist(n)),getfield(n,"dir"))
end
end
@@ -189,26 +219,27 @@ end
local function identify(list)
if list then
for n in traverse_id(hlist_code,list) do
- if n[a_linenumber] then
- return list
+ local a = getattr(n,a_linenumber)
+ if a then
+ return list, a
end
end
local n = list
while n do
- local id = n.id
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- local ok = identify(n.list)
+ local ok, a = identify(getlist(n))
if ok then
- return ok
+ return ok, a
end
end
- n = n.next
+ n = getnext(n)
end
end
end
function boxed.stage_zero(n)
- return identify(texbox[n].list)
+ return identify(getlist(getbox(n)))
end
-- reset ranges per page
@@ -217,66 +248,143 @@ end
function boxed.stage_one(n,nested)
current_list = { }
- local head = texbox[n]
- if head then
- local list = head.list
- if nested then
- list = identify(list)
+ local box = getbox(n)
+ if box then
+ local found = nil
+ local list = getlist(box)
+ if list and nested then
+ list, found = identify(list)
end
- local last_a, last_v, skip = nil, -1, false
- for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
- if n.height == 0 and n.depth == 0 then
- -- skip funny hlists -- todo: check line subtype
- else
- local list = n.list
- local a = list[a_linenumber]
- if a and a > 0 then
- if last_a ~= a then
- local da = data[a]
- local ma = da.method
- if ma == variables.next then
- skip = true
- elseif ma == variables.page then
- da.start = 1 -- eventually we will have a normal counter
- end
- last_a = a
- if trace_numbers then
- report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no")
+ if list then
+ local last_a, last_v, skip = nil, -1, false
+ for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
+ if getfield(n,"height") == 0 and getfield(n,"depth") == 0 then
+ -- skip funny hlists -- todo: check line subtype
+ else
+ local list = getlist(n)
+ local a = getattr(list,a_linenumber)
+ if not a or a == 0 then
+ local n = getnext(list)
+ while n do
+ local id = getid(n)
+ if id == whatsit_code and getsubtype(n) == textdir_code then
+ n = getnext(n)
+ elseif id == glue_code and getsubtype(n) == leftskip_code then
+ n = getnext(n)
+ else
+if id == glyph_code then
+ break
+else
+ -- can be hlist or skip (e.g. footnote line)
+ n = getnext(n)
+end
+ end
end
+ a = n and getattr(n,a_linenumber)
end
- if n[a_displaymath] then
- if nodes.is_display_math(n) then
- check_number(n,a,skip)
+ if a and a > 0 then
+ if last_a ~= a then
+ local da = data[a]
+ local ma = da.method
+ if ma == v_next then
+ skip = true
+ elseif ma == v_page then
+ da.start = 1 -- eventually we will have a normal counter
+ end
+ last_a = a
+ if trace_numbers then
+ report_lines("starting line number range %s: start %s, continue %s",a,da.start,da.continue or v_no)
+ end
end
- else
- local v = list[a_verbatimline]
- if not v or v ~= last_v then
- last_v = v
- check_number(n,a,skip)
+ if getattr(n,a_displaymath) then
+ if is_display_math(n) then
+ check_number(n,a,skip)
+ end
else
- check_number(n,a,skip,true)
+ local v = getattr(list,a_verbatimline)
+ if not v or v ~= last_v then
+ last_v = v
+ check_number(n,a,skip)
+ else
+ check_number(n,a,skip,true)
+ end
end
+ skip = false
end
- skip = false
end
end
end
end
end
+-- [dir][leftskip][content]
+
+function boxed.stage_two(n,m)
+ if #current_list > 0 then
+ m = m or lines.scratchbox
+ local t, tn = { }, 0
+ for l in traverse_id(hlist_code,getlist(getbox(m))) do
+ tn = tn + 1
+ t[tn] = copy_node(l) -- use take_box instead
+ end
+ for i=1,#current_list do
+ local li = current_list[i]
+ local n, m, ti = li[1], li[2], t[i]
+ if ti then
+ local l = getlist(n)
+ -- we want to keep leftskip at the start
+-- local id = getid(l)
+-- if id == whatsit_code and getsubtype(l) == textdir_code then
+-- l = getnext(l)
+-- id = getid(l)
+-- end
+-- if getid(l) == glue_code and getsubtype(l) == leftskip_code then
+-- -- [leftskip] [number] [rest]
+-- local forward = copy_node(l)
+-- local backward = negated_glue(l)
+-- local next = getnext(l)
+-- setfield(l,"next",backward)
+-- setfield(backward,"prev",l)
+-- setfield(backward,"next",ti)
+-- setfield(ti,"prev",backward)
+-- setfield(ti,"next",forward)
+-- setfield(forward,"prev",ti)
+-- setfield(forward,"next",next)
+-- setfield(next,"prev",forward)
+-- else
+ -- [number] [rest]
+ setfield(ti,"next",l)
+ setfield(l,"prev",ti)
+ setfield(n,"list",ti)
+-- end
+ resolve(n,m)
+ else
+ report_lines("error in linenumbering (1)")
+ return
+ end
+ end
+ end
+end
+
function boxed.stage_two(n,m)
if #current_list > 0 then
m = m or lines.scratchbox
local t, tn = { }, 0
- for l in traverse_id(hlist_code,texbox[m].list) do
+ for l in traverse_id(hlist_code,getlist(getbox(m))) do
tn = tn + 1
- t[tn] = copy_node(l)
+ t[tn] = copy_node(l) -- use take_box instead
end
for i=1,#current_list do
local li = current_list[i]
local n, m, ti = li[1], li[2], t[i]
if ti then
- ti.next, n.list = n.list, ti
+ local l = getlist(n)
+ setfield(ti,"next",l)
+ setfield(l,"prev",ti)
+ local h = copy_node(n)
+ setfield(h,"dir","TLT")
+ setfield(h,"list",ti)
+ setfield(n,"list",h)
resolve(n,m)
else
report_lines("error in linenumbering (1)")
diff --git a/Master/texmf-dist/tex/context/base/page-lin.mkvi b/Master/texmf-dist/tex/context/base/page-lin.mkvi
new file mode 100644
index 00000000000..e3b6284876e
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/page-lin.mkvi
@@ -0,0 +1,566 @@
+%D \module
+%D [ file=page-lin,
+%D version=2007.11.29,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Line Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% generic or not ... maybe not bother too much and simplify to mkiv only
+% get rid of \mk* (left over from experimental times)
+%
+% to be redone (was experiment) .. can be hooked into margin code
+% reshuffle arguments
+
+\writestatus{loading}{ConTeXt Core Macros / Line Numbering}
+
+\unprotect
+
+% todo: save settings
+%
+% low level interface
+%
+% we should use normal counters but then we need to sync settings
+
+% some line
+%
+% \startlocallinenumbering
+% some source code 1\par
+% some source code 2\par
+% some source code 3\par
+% \stoplocallinenumbering
+%
+% some line
+
+\registerctxluafile{page-lin}{1.001}
+
+\definesystemattribute[linenumber] [public]
+\definesystemattribute[linereference][public]
+
+\appendtoksonce
+ \attribute\linenumberattribute\attributeunsetvalue
+\to \everyforgetall
+
+\newcount \linenumber % not used
+\newbox \b_page_lines_scratch
+\newcount \c_page_lines_reference
+\newconstant\c_page_lines_nesting
+
+\newconditional\tracelinenumbering % we keep this for old times sake
+
+\installtextracker
+ {lines.numbers.show}
+ {\settrue \tracelinenumbering}
+ {\setfalse\tracelinenumbering}
+
+% id nr shift width leftskip dir
+
+\installcorenamespace{linenumberinginstance}
+
+% tag skipflag s getfield(n,"shift") getfield(n,"width") leftmarginwidth(getlist(n)) getfield(n,"dir"))
+
+\let\makelinenumber\gobblesevenarguments % used at lua end
+
+\newconditional\page_postprocessors_needed_box
+
+\unexpanded\def\page_postprocessors_linenumbers_page #tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_box #tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_deepbox#tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \plusone }
+\unexpanded\def\page_postprocessors_linenumbers_column #tag{\page_lines_add_numbers_to_box{#tag}\currentcolumn\nofcolumns\zerocount}
+
+\def\page_lines_parameters_regular
+ {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi",
+ start = \number\linenumberingparameter\c!start,
+ step = \number\linenumberingparameter\c!step,
+ method = "\linenumberingparameter\c!method",
+ tag = "\currentlinenumbering"}
+
+\def\page_lines_parameters_update
+ {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi"}
+
+\def\page_lines_start_define
+ {\setxvalue{\??linenumberinginstance\currentlinenumbering}{\ctxcommand{registerlinenumbering({\page_lines_parameters_regular})}}}
+
+\def\page_lines_start_update
+ {\ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_update})}}
+
+\def\page_lines_setup
+ {\ifcsname \??linenumberinginstance\currentlinenumbering\endcsname
+ \ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_regular})}%
+ \fi}
+
+% we could make this a bit more efficient by putting the end reference
+% in the same table as the start one but why make things complex ...
+
+\let\dofinishlinereference\dofinishfullreference % at lua end
+
+\unexpanded\def\page_lines_some_reference#1#2#3%
+ {\dontleavehmode\begingroup
+ \global\advance\c_page_lines_reference\plusone
+ \attribute\linereferenceattribute\c_page_lines_reference
+ #3%
+ % for the moment we use a simple system i.e. no prefixes etc .. todo: store as number
+ \normalexpanded{\strc_references_set_named_reference{line}{#2}{conversion=\linenumberingparameter\c!conversion}{\the\c_page_lines_reference}}% kind labels userdata text
+ \endgroup}
+
+% \def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{}} % reimplemented later
+% \def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{}} % reimplemented later
+
+% \def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced/ not used
+% \def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced/ not used
+
+\newif\ifnumberinglines % will change
+\newif\iftypesettinglines % will change
+
+\installcorenamespace{linenumbering}
+
+\installcommandhandler \??linenumbering {linenumbering} \??linenumbering
+
+\setnewconstant\c_page_lines_mode \plusone % 0=continue, 1=restart
+\setnewconstant\c_page_lines_location \plusone % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text, 6=begin, 7=end
+\setnewconstant\c_page_lines_alignment\plusfive % 0=middle, 1=left, 2=right, 5=auto
+
+\newdimen\d_page_lines_width
+\newdimen\d_page_lines_distance
+
+\newevery \beforeeverylinenumbering \relax
+\newevery \aftereverylinenumbering \relax
+\newevery \everylinenumber \relax
+
+\appendtoks
+ \page_lines_setup
+\to \everysetuplinenumbering
+
+\appendtoks
+ \page_lines_start_define
+\to \everydefinelinenumbering
+
+\setuplinenumbering
+ [\c!conversion=\v!numbers,
+ \c!start=1,
+ \c!step=1,
+ \c!method=\v!first,
+ \c!continue=\v!no,
+ \c!style=,
+ \c!color=,
+ \c!width=2\emwidth,
+ \c!left=,
+ \c!right=,
+ \c!command=,
+ \c!margin=2.5\emwidth,
+ \c!distance=\zeropoint,
+ \c!location=\v!default, % depends on direction, columns etc
+ \c!align=\v!auto]
+
+\definelinenumbering
+ []
+
+% \startlinenumbering[|continue|settings|name]
+% \startlinenumbering[name][|continue|settings]
+
+\unexpanded\def\startlinenumbering
+ {\dodoubleempty\page_lines_start}
+
+\def\page_lines_start % we stay downward compatible
+ {\begingroup
+ \ifsecondargument
+ \expandafter\page_lines_start_two
+ \else\iffirstargument
+ \doubleexpandafter\page_lines_start_one
+ \else
+ \doubleexpandafter\page_lines_start_zero
+ \fi\fi}
+
+\def\page_lines_start_zero[#1][#2]%
+ {\edef\m_argument{\linenumberingparameter\c!continue}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_one[#1][#2]% [continue||settings] % historic
+ {\edef\m_argument{#1}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \let\currentlinenumbering\empty
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty
+ \let\currentlinenumbering\empty
+ \else
+ \doifassignmentelse{#1}
+ {\let\currentlinenumbering\empty
+ \setupcurrentlinenumbering[#1]}
+ {\doifnumberelse\m_argument
+ {\let\currentlinenumbering\empty
+ \letlinenumberingparameter\c!start\m_argument}
+ {\let\currentlinenumbering\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_two[#1][#2]% [tag][continue||settings]
+ {\edef\currentlinenumbering{#1}%
+ \edef\m_argument{#2}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty \else
+ \doifassignmentelse{#2}
+ {\setupcurrentlinenumbering[#2]}
+ {\doifnumber\m_argument
+ {\letlinenumberingparameter\c!start\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\newconditional\c_page_lines_auto_narrow
+
+\def\page_lines_start_followup
+ {\numberinglinestrue
+ \edef\p_location{\linenumberingparameter\c!location}%
+ \setfalse\c_page_lines_auto_narrow
+ \ifhmode \else
+ \ifx\p_location\v!text
+ \ifdim\leftskip>\zeropoint \else
+ \advance\leftskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \else\ifx\p_location\v!begin
+ \ifdim\leftskip>\zeropoint \else
+ \advance\leftskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \else\ifx\p_location\v!end
+ \ifdim\leftskip>\zeropoint \else
+ \advance\rightskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \fi\fi\fi
+ \fi
+ \the\beforeeverylinenumbering
+ \globallet\page_postprocessors_page \page_postprocessors_linenumbers_page
+ \globallet\page_postprocessors_column\page_postprocessors_linenumbers_column
+ \global\settrue\page_postprocessors_needed_box % see core-rul.mkiv
+ \ifcase\c_page_lines_mode\relax
+ \page_lines_start_update % continue
+ \or
+ \page_lines_start_define % only when assignment
+ \fi
+ \attribute\linenumberattribute\csname\??linenumberinginstance\currentlinenumbering\endcsname\relax}
+
+\unexpanded\def\stoplinenumbering
+ {\attribute\linenumberattribute\attributeunsetvalue
+ \the\aftereverylinenumbering
+ \ifconditional\c_page_lines_auto_narrow\par\fi
+ \endgroup}
+
+% number placement .. will change into (the new) margin code
+
+\newconditional\c_page_lines_fake_number
+\newconstant \b_page_lines_number
+\newconstant \c_page_lines_column
+\newconstant \c_page_lines_last_column
+\newdimen \d_page_lines_line_width
+\settrue \c_page_lines_dir_left_to_right
+
+\installcorenamespace{linenumberinghandler}
+
+\def\page_line_swap_align % can become a helper
+ {\ifx\p_align\v!inner \let\p_align\v!outer \else
+ \ifx\p_align\v!outer \let\p_align\v!inner \else
+ \ifx\p_align\v!flushleft \let\p_align\v!flushright\else
+ \ifx\p_align\v!flushright\let\p_align\v!flushleft \else
+ \ifx\p_align\v!left \let\p_align\v!right \else
+ \ifx\p_align\v!right \let\p_align\v!left \fi\fi\fi\fi\fi\fi}
+
+\def\page_lines_add_numbers_to_box#box#column#max#nesting%
+ {\bgroup
+ \b_page_lines_number #box\relax
+ \c_page_lines_column #column\relax
+ \c_page_lines_last_column#max\relax
+ \c_page_lines_nesting #nesting\relax
+ \fullrestoreglobalbodyfont
+ \let\makelinenumber\page_lines_make_number % used at lua end
+ \setbox\b_page_lines_scratch\vbox
+ {\forgetall
+ \offinterlineskip
+ \ctxcommand{linenumbersstageone(\number\b_page_lines_number,\ifcase\c_page_lines_nesting false\else true\fi)}}%
+ \ctxcommand{linenumbersstagetwo(\number\b_page_lines_number,\number\b_page_lines_scratch)}% can move to lua code
+ \egroup}
+
+\let\page_lines_make_number_indeed\relax
+
+% \def\page_lines_rlap{\ifconditional\c_page_lines_dir_left_to_right\expandafter\rlap\else\expandafter\llap\fi}
+% \def\page_lines_llap{\ifconditional\c_page_lines_dir_left_to_right\expandafter\llap\else\expandafter\rlap\fi}
+
+\def\page_lines_add_numbers_to_box#box#column#max#nesting%
+ {\bgroup
+ \b_page_lines_number #box\relax
+ \c_page_lines_column #column\relax
+ \c_page_lines_last_column#max\relax
+ \c_page_lines_nesting #nesting\relax
+ \fullrestoreglobalbodyfont
+ \let\makelinenumber\page_lines_make_number % used at lua end
+ \setbox\b_page_lines_scratch\vbox
+ {\forgetall
+ \offinterlineskip
+ \ctxcommand{linenumbersstageone(\number\b_page_lines_number,\ifcase\c_page_lines_nesting false\else true\fi)}}%
+ \ctxcommand{linenumbersstagetwo(\number\b_page_lines_number,\number\b_page_lines_scratch)}% can move to lua code
+ \egroup}
+
+\def\page_lines_make_number#tag#mode#linenumber#shift#width#leftskip#dir% beware, one needs to compensate for this in the \hsize
+ {\naturalhbox to \zeropoint \bgroup
+ \ifcase#mode\relax
+ % \settrue \c_page_lines_fake_number
+ \else
+ % \setfalse\c_page_lines_fake_number
+ \edef\currentlinenumbering{#tag}%
+ \def\linenumber{#linenumber}% unsafe
+ \d_page_lines_line_width#width\scaledpoint\relax
+ \d_page_lines_distance\linenumberingparameter\c!distance\relax
+ \edef\p_align{\linenumberingparameter\c!align}%
+ \edef\p_location{\linenumberingparameter\c!location}%
+ \ifcase\istltdir#dir\relax
+ \settrue \c_page_lines_dir_left_to_right
+ \else
+ \setfalse\c_page_lines_dir_left_to_right
+ \fi
+ %
+ % maybe we also need an option to ignore columns, so that we renumber
+ % once but on the other hand this assumes aligned lines
+ %
+ \ifcase\c_page_lines_last_column\relax
+ \settrue \c_page_lines_fake_number % why
+ \or
+ % one column
+ \or
+ % two columns
+ \ifx\p_location\v!default % or just margin
+ \ifcase\c_page_lines_column\relax
+ \settrue \c_page_lines_fake_number % why
+ \or
+ % one
+ \let\p_location\v!left
+ \else
+ % two
+ \let\p_location\v!right
+ % can become a helper
+ \page_line_swap_align
+ \fi
+ \fi
+ \else
+ % too fuzzy
+ \fi
+ \ifx\p_location\v!default
+ \ifconditional\c_page_lines_dir_left_to_right
+ \let\p_location\v!left
+ \else
+ \let\p_location\v!right
+ \page_line_swap_align % yes or no
+ \fi
+ \fi
+ %
+ \executeifdefined{\??linenumberinghandler\p_location}\relax
+ \fi
+ \egroup}
+
+\def\page_lines_number_inject#align#width%
+ {\edef\p_width{\linenumberingparameter\c!width}%
+ \ifx\p_width\v!margin
+ \d_page_lines_width#width%
+ \else
+ \d_page_lines_width\p_width
+ \fi
+ \relax
+ \ifdim\d_page_lines_width>\zeropoint
+% \ifconditional\c_page_lines_dir_left_to_right\else
+% \let\simplealignedbox\simplereversealignedbox
+% \fi
+ \ifconditional\tracelinenumbering
+ \ruledhbox{\simplealignedbox\d_page_lines_width#align{\page_lines_number_inject_indeed}}%
+ \else
+ \simplealignedbox\d_page_lines_width#align{\page_lines_number_inject_indeed}%
+ \fi
+ \else
+ \ifconditional\tracelinenumbering
+ \ruledhbox
+ \else
+ % \hbox
+ \fi
+ {\page_lines_number_inject_indeed}%
+ \fi}
+
+\def\page_lines_number_inject_indeed
+ {\uselinenumberingstyleandcolor\c!style\c!color
+ \linenumberingparameter\c!command
+ {\linenumberingparameter\c!left
+ \convertnumber{\linenumberingparameter\c!conversion}\linenumber
+ \linenumberingparameter\c!right}}
+
+% \def\dodorlap{\hbox to \zeropoint{\box\nextbox\normalhss}\endgroup}
+% \def\dodollap{\hbox to \zeropoint{\normalhss\box\nextbox}\endgroup}
+
+\def\page_line_handle_left#align#width#distance%
+ {\llap
+ {\page_lines_number_inject#align#width%
+ \kern\dimexpr#distance+\d_page_lines_distance\relax
+ \the\everylinenumber
+ \hss}}
+
+\def\page_line_handle_right#align#width#distance%
+ {\rlap
+ {\kern\dimexpr#distance+\d_page_lines_distance+\d_page_lines_line_width\relax
+ \page_lines_number_inject#align#width%
+ \the\everylinenumber}}
+
+\setuvalue{\??linenumberinghandler\v!left}%
+ {\page_line_handle_left\p_align\leftmarginwidth\leftmargindistance}
+
+\setuvalue{\??linenumberinghandler\v!right}%
+ {\page_line_handle_right\p_align\rightmarginwidth\rightmargindistance}
+
+\setuvalue{\??linenumberinghandler\v!inner}%
+ {\ifodd\realpageno
+ \ifx\p_align\v!inner
+ \page_line_handle_left\v!flushleft\leftmarginwidth\leftmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_left\v!flushright\leftmarginwidth\leftmargindistance
+ \else
+ \page_line_handle_left\p_align\leftmarginwidth\leftmargindistance
+ \fi\fi
+ \else
+ \ifx\p_align\v!inner
+ \page_line_handle_right\v!flushright\rightmarginwidth\rightmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_right\v!flushleft\rightmarginwidth\rightmargindistance
+ \else
+ \page_line_handle_right\p_align\rightmarginwidth\rightmargindistance
+ \fi\fi
+ \fi}
+
+\setuvalue{\??linenumberinghandler\v!outer}%
+ {\ifodd\realpageno
+ \ifx\p_align\v!inner
+ \page_line_handle_right\v!flushleft\leftmarginwidth\leftmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_right\v!flushright\leftmarginwidth\leftmargindistance
+ \else
+ \page_line_handle_right\p_align\leftmarginwidth\leftmargindistance
+ \fi\fi
+ \else
+ \ifx\p_align\v!inner
+ \page_line_handle_left\v!flushright\rightmarginwidth\rightmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_left\v!flushleft\rightmarginwidth\rightmargindistance
+ \else
+ \page_line_handle_left\p_align\rightmarginwidth\rightmargindistance
+ \fi\fi
+ \fi}
+
+\def\page_line_handle_begin#align%
+ {\rlap
+ {\kern\d_page_lines_distance
+ \page_lines_number_inject#align\zeropoint
+ \the\everylinenumber}}
+
+\def\page_line_handle_end#align%
+ {\rlap
+ {\kern\d_page_lines_line_width\relax
+ \llap
+ {\page_lines_number_inject#align\zeropoint
+ \kern\d_page_lines_distance
+ \the\everylinenumber}}}
+
+\setuvalue{\??linenumberinghandler\v!begin}{\page_line_handle_begin\p_align}
+\setuvalue{\??linenumberinghandler\v!end }{\page_line_handle_end \p_align}
+\setuvalue{\??linenumberinghandler\v!text }{\page_line_handle_begin\p_align}
+
+\setuevalue{\??linenumberinghandler\v!inleft }{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!inmargin}{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!margin }{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!inright }{\getvalue{\??linenumberinghandler\v!right}}
+
+% referencing: \permithyphenation, also removes leading spaces (new per 29-11-2013)
+
+\unexpanded\def\someline [#1]{\page_lines_reference_start{#1}\page_lines_reference_stop{#1}} % was just a def
+\unexpanded\def\startline[#1]{\page_lines_reference_start{#1}\ignorespaces}
+\unexpanded\def\stopline [#1]{\removeunwantedspaces\permithyphenation\page_lines_reference_stop{#1}}
+
+\def\page_lines_reference_show_start
+ {\ifconditional\tracelinenumbering
+ \expandafter\page_lines_reference_show_start_indeed
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\def\page_lines_reference_show_stop
+ {\ifconditional\tracelinenumbering
+ \expandafter\page_lines_reference_show_stop_indeed
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\def\page_lines_reference_show_start_indeed#1%
+ {\setbox\scratchbox\hbox{\llap
+ {\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
+ \smashbox\scratchbox
+ \box\scratchbox}
+
+\def\page_lines_reference_show_stop_indeed#1%
+ {\setbox\scratchbox\hbox{\rlap
+ {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht}}%
+ \smashbox\scratchbox
+ \box\scratchbox}
+
+\def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{\page_lines_reference_show_start{#1}}}
+\def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{\page_lines_reference_show_stop {#1}}}
+
+% eventually we will do this in lua
+
+\def\currentreferencelinenumber{\ctxcommand{filterreference("linenumber")}}
+
+\let\m_page_lines_from\empty
+\let\m_page_lines_to \empty
+
+\unexpanded\def\doifelsesamelinereference#1#2#3%
+ {\doifreferencefoundelse{lr:b:#1}
+ {\edef\m_page_lines_from{\currentreferencelinenumber}%
+ \doifreferencefoundelse{lr:e:#1}
+ {\edef\m_page_lines_to{\currentreferencelinenumber}%
+ %[\m_page_lines_from,\m_page_lines_to]
+ \ifx\m_page_lines_from\m_page_lines_to#2\else#3\fi}
+ {#2}}
+ {#2}}
+
+\unexpanded\def\inline#1[#2]%
+ {\doifelsenothing{#1}
+ {\doifelsesamelinereference{#2}
+ {\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
+ {\in{\leftlabeltext\v!lines}{}[lr:b:#2]--\in{}{\rightlabeltext\v!lines}[lr:e:#2]}}
+ {\doifelsesamelinereference{#2}
+ {\in{#1}[lr:b:#2]}
+ {\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
+
+\unexpanded\def\inlinerange[#1]%
+ {\doifelsesamelinereference{#1}
+ {\in[lr:b:#1]}
+ {\in[lr:b:#1]\endash\in[lr:e:#1]}}
+
+\protect \endinput
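
The new module keeps the familiar user-level interface documented in the comments above. A small usage sketch, not part of the patch, covering both calling conventions (the instance name "verse" and the settings are made up):

    \definelinenumbering[verse][step=5,location=inright,align=flushleft]

    \startlinenumbering[verse]
      first line \par
      second line \par
      third line \par
    \stoplinenumbering

    % historic one-argument form: continue the previous numbering
    \startlinenumbering[continue]
      more lines \par
    \stoplinenumbering
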
diff --git a/Master/texmf-dist/tex/context/base/page-mak.mkvi b/Master/texmf-dist/tex/context/base/page-mak.mkvi
index 71af520a196..c910f281d23 100644
--- a/Master/texmf-dist/tex/context/base/page-mak.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-mak.mkvi
@@ -91,13 +91,60 @@
\def\page_makeup_start_yes[#name]% [#settings]%
{\doifelsecommandhandler\??makeup{#name}\page_makeup_start_indeed\page_makeup_start_nop[#name]}%
+% case 1:
+%
+% \setuplayout[height=5cm]
+%
+% case 2:
+%
+% \definelayout[crap][height=10cm]
+% \definelayout[standard][crap]
+%
+% case 3:
+%
+% \setuplayout[standard][height=15cm]
+%
+% case 4:
+%
+% \definelayout[whatever][height=2cm]
+% \setuplayout[whatever]
+
\def\page_makeup_start_indeed[#name][#settings]%
- {\doifelsenothing{\namedmakeupparameter{#name}\c!page}
- {\page}% new, so best not have dangling mess here like references (we could capture then and flush embedded)
- {\page[\namedmakeupparameter{#name}\c!page]}%
+ {% the next grouping hack is somewhat messy:
+ \begingroup
+ % we need to figure out the current layout
+ \xdef\m_page_makeup_name{#name}%
+ \let\currentmakeup\m_page_makeup_name
+ \let\currentlayout\m_page_makeup_name
+ \xdef\m_page_makeup_layout_parent{\layoutparameter\s!parent}%
+ \setupcurrentmakeup[#settings]%
+ \edef\p_page{\makeupparameter\c!page}%
+ \ifx\p_page\empty
+ \endgroup
+ \page % new, so best not have dangling mess here like references (we could capture then and flush embedded)
+ \else\ifx\p_page\v!no
+ % nothing
+ \endgroup
+ \else
+ \endgroup
+ \page[\p_page]%
+ \fi\fi
+ % some dirty trickery (sorry) for determining if we have
+ % - a layout definition at all
+ % - inherit from the parent of that definition
+ % - inherit from the current layout otherwise
+ \ifx\m_page_makeup_name\currentlayout
+ % we already use the layout
+ \else\ifx\m_page_makeup_layout_parent\??layout
+ % we inherit from the current layout
+ \normalexpanded{\setuplayout[#name][\s!parent=\??layout\currentlayout]}% is remembered but checked later anyway
+ % \else
+ % we have an inherited layout
+ \fi\fi
\startlayout[#name]% includes \page
\bgroup
- \edef\currentmakeup{#name}%
+ %\edef\currentmakeup{#name}%
+ \let\currentmakeup\m_page_makeup_name
\setupcurrentmakeup[#settings]%
\setsystemmode\v!makeup
\the\t_page_makeup_every_setup
@@ -137,7 +184,12 @@
\fi \fi
\strc_pagenumbers_page_state_pop % new
\egroup
- \stoplayout} % includes \page
+ \stoplayout % includes \page
+ \ifx\m_page_makeup_name\currentlayout
+ \else\ifx\m_page_makeup_layout_parent\??layout
+ \normalexpanded{\setuplayout[\m_page_makeup_name][\s!parent=\??layout]}% is remembered but checked later anyway
+ % \else
+ \fi\fi}
\setvalue{\??makeupdoublesided\v!yes}%
{\emptyhbox
@@ -184,6 +236,7 @@
\c!headerstate=\v!stop,
\c!footerstate=\v!stop,
\c!pagestate=\v!stop] % in manual ! ! !
+% \c!pagestate=\v!start]
\definemakeup
[\v!standard]
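
As a companion to the case list in the comments above: when a layout has been defined with the same name as a makeup (and inherits from the layout root), it is now temporarily parented to the current layout while that makeup is typeset, so only the keys it sets differ from the surrounding pages. A hedged sketch (not part of the patch; the names are made up):

    \definelayout[titlepage][height=15cm]  % only the height differs
    \definemakeup[titlepage][page=yes]

    \starttitlepagemakeup
      ... title page content ...
    \stoptitlepagemakeup                   % previous layout chain is restored
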
diff --git a/Master/texmf-dist/tex/context/base/page-mix.lua b/Master/texmf-dist/tex/context/base/page-mix.lua
index cf009478736..0fbaa4e30b4 100644
--- a/Master/texmf-dist/tex/context/base/page-mix.lua
+++ b/Master/texmf-dist/tex/context/base/page-mix.lua
@@ -15,48 +15,79 @@ if not modules then modules = { } end modules ["page-mix"] = {
local concat = table.concat
-local nodecodes = nodes.nodecodes
-local gluecodes = nodes.gluecodes
-local nodepool = nodes.pool
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local insert_code = nodecodes.ins
-local mark_code = nodecodes.mark
-
-local new_hlist = nodepool.hlist
-local new_vlist = nodepool.vlist
-local new_glue = nodepool.glue
-
-local hpack = node.hpack
-local vpack = node.vpack
-local freenode = node.free
-
-local texbox = tex.box
-local texskip = tex.skip
-local texdimen = tex.dimen
-local points = number.points
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local variables = interfaces.variables
-local v_yes = variables.yes
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_columns = variables.columns
-
local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
local report_state = logs.reporter("mixed columns")
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+local rule_code = nodecodes.rule
+
+local topskip_code = gluecodes.topskip
+local lineskip_code = gluecodes.lineskip
+local baselineskip_code = gluecodes.baselineskip
+local userskip_code = gluecodes.userskip
+
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local nodetostring = nuts.tostring
+local listtoutf = nodes.listtoutf
+
+local hpack = nuts.hpack
+local vpack = nuts.vpack
+local freenode = nuts.free
+local concatnodes = nuts.concat
+local slidenodes = nuts.slide -- ok here as we mess with prev links intermediately
+local traversenodes = nuts.traverse
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+local setbox = nuts.setbox
+local getskip = nuts.getskip
+local getattribute = nuts.getattribute
+
+local nodepool = nuts.pool
+
+local new_hlist = nodepool.hlist
+local new_vlist = nodepool.vlist
+local new_glue = nodepool.glue
+
+local points = number.points
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_columns = variables.columns
+local v_fixed = variables.fixed
+local v_auto = variables.auto
+local v_none = variables.none
+local v_more = variables.more
+local v_less = variables.less
+
pagebuilders = pagebuilders or { }
pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
local mixedcolumns = pagebuilders.mixedcolumns
-local forcedbreak = -123
+local a_checkedbreak = attributes.private("checkedbreak")
+local forcedbreak = -123
-- initializesplitter(specification)
-- cleanupsplitter()
@@ -73,13 +104,13 @@ local function collectinserts(result,nxt,nxtid)
local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
while nxt do
if nxtid == insert_code then
- inserttotal = inserttotal + nxt.height + nxt.depth
- local s = nxt.subtype
+ inserttotal = inserttotal + getfield(nxt,"height") + getfield(nxt,"depth")
+ local s = getsubtype(nxt)
local c = inserts[s]
if not c then
c = { }
inserts[s] = c
- local width = texskip[s].width
+ local width = getfield(getskip(s),"width")
if not result.inserts[s] then
currentskips = currentskips + width
end
@@ -96,9 +127,9 @@ local function collectinserts(result,nxt,nxtid)
else
break
end
- nxt = nxt.next
+ nxt = getnext(nxt)
if nxt then
- nxtid = nxt.id
+ nxtid = getid(nxt)
else
break
end
@@ -124,33 +155,31 @@ end
local function discardtopglue(current,discarded)
local size = 0
while current do
- local id = current.id
+ local id = getid(current)
if id == glue_code then
- size = size + current.spec.width
+ size = size + getfield(getfield(current,"spec"),"width")
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
elseif id == penalty_code then
- if current.penalty == forcedbreak then
+ if getfield(current,"penalty") == forcedbreak then
discarded[#discarded+1] = current
- current = current.next
- while current do
- local id = current.id
- if id == glue_code then
- size = size + current.spec.width
- discarded[#discarded+1] = current
- current = current.next
- else
- break
- end
+ current = getnext(current)
+ while current and getid(current) == glue_code do
+ size = size + getfield(getfield(current,"spec"),"width")
+ discarded[#discarded+1] = current
+ current = getnext(current)
end
else
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
end
else
break
end
end
+ if current then
+ setfield(current,"prev",nil) -- prevent look back
+ end
return current, size
end
@@ -160,13 +189,13 @@ local function stripbottomglue(results,discarded)
local r = results[i]
local t = r.tail
while t and t ~= r.head do
- local prev = t.prev
+ local prev = getprev(t)
if not prev then
break
end
- local id = t.id
+ local id = getid(t)
if id == penalty_code then
- if t.penalty == forcedbreak then
+ if getfield(t,"penalty") == forcedbreak then
break
else
discarded[#discarded+1] = t
@@ -175,7 +204,7 @@ local function stripbottomglue(results,discarded)
end
elseif id == glue_code then
discarded[#discarded+1] = t
- local width = t.spec.width
+ local width = getfield(getfield(t,"spec"),"width")
if trace_state then
report_state("columns %s, discarded bottom glue %p",i,width)
end
@@ -199,25 +228,30 @@ local function setsplit(specification) -- a rather large function
report_state("fatal error, no box")
return
end
- local list = texbox[box]
+ local list = getbox(box)
if not list then
report_state("fatal error, no list")
return
end
- local head = list.head or specification.originalhead
+ local head = getlist(list) or specification.originalhead
if not head then
report_state("fatal error, no head")
return
end
+ slidenodes(head) -- we can have set prev's to nil to prevent backtracking
local discarded = { }
local originalhead = head
- local originalwidth = specification.originalwidth or list.width
- local originalheight = specification.originalheight or list.height
+ local originalwidth = specification.originalwidth or getfield(list,"width")
+ local originalheight = specification.originalheight or getfield(list,"height")
local current = head
local skipped = 0
local height = 0
local depth = 0
local skip = 0
+ local splitmethod = specification.splitmethod or false
+ if splitmethod == v_none then
+ splitmethod = false
+ end
local options = settings_to_hash(specification.option or "")
local stripbottom = specification.alternative == v_local
local cycle = specification.cycle or 1
@@ -253,12 +287,104 @@ local function setsplit(specification) -- a rather large function
delta = 0,
}
end
- local column = 1
- local line = 0
- local result = results[column]
- local lasthead = nil
- local rest = nil
+
+ local column = 1
+ local line = 0
+ local result = results[1]
+ local lasthead = nil
+ local rest = nil
+ local lastlocked = nil
+ local lastcurrent = nil
+ local lastcontent = nil
+ local backtracked = false
+
+ if trace_state then
+ report_state("setting collector to column %s",column)
+ end
+
+ local function unlock(penalty)
+ if lastlocked then
+ if trace_state then
+ report_state("penalty %s, unlocking in column %s",penalty or "-",column)
+ end
+ lastlocked = nil
+ end
+ lastcurrent = nil
+ lastcontent = nil
+ end
+
+ local function lock(penalty,current)
+ if trace_state then
+ report_state("penalty %s, locking in column %s",penalty,column)
+ end
+ lastlocked = penalty
+ lastcurrent = current or lastcurrent
+ lastcontent = nil
+ end
+
+ local function backtrack(start)
+ local current = start
+ -- first skip over glue and penalty
+ while current do
+ local id = getid(current)
+ if id == glue_code then
+ if trace_state then
+ report_state("backtracking over %s in column %s","glue",column)
+ end
+ current = getprev(current)
+ elseif id == penalty_code then
+ if trace_state then
+ report_state("backtracking over %s in column %s","penalty",column)
+ end
+ current = getprev(current)
+ else
+ break
+ end
+ end
+ -- then skip over content
+ while current do
+ local id = getid(current)
+ if id == glue_code then
+ if trace_state then
+ report_state("quitting at %s in column %s","glue",column)
+ end
+ break
+ elseif id == penalty_code then
+ if trace_state then
+ report_state("quitting at %s in column %s","penalty",column)
+ end
+ break
+ else
+ current = getprev(current)
+ end
+ end
+ if not current then
+ if trace_state then
+ report_state("no effective backtracking in column %s",column)
+ end
+ current = start
+ end
+ return current
+ end
+
local function gotonext()
+ if lastcurrent then
+ if current ~= lastcurrent then
+ if trace_state then
+ report_state("backtracking to preferred break in column %s",column)
+ end
+ -- todo: also remember height/depth
+ current = backtrack(lastcurrent)
+ backtracked = true
+ end
+ lastcurrent = nil
+ if lastlocked then
+ if trace_state then
+ report_state("unlocking in column %s",column)
+ end
+ lastlocked = nil
+ end
+ end
if head == lasthead then
if trace_state then
report_state("empty column %s, needs more work",column)
@@ -271,7 +397,7 @@ local function setsplit(specification) -- a rather large function
if current == head then
result.tail = head
else
- result.tail = current.prev
+ result.tail = getprev(current)
end
result.height = height
result.depth = depth
@@ -282,19 +408,24 @@ local function setsplit(specification) -- a rather large function
if column == nofcolumns then
column = 0 -- nicer in trace
rest = head
- -- lasthead = head
return false, 0
else
local skipped
column = column + 1
result = results[column]
+ if trace_state then
+ report_state("setting collector to column %s",column)
+ end
current, skipped = discardtopglue(current,discarded)
+ if trace_detail and skipped ~= 0 then
+ report_state("check > column 1, discarded %p",skipped)
+ end
head = current
- -- lasthead = head
return true, skipped
end
end
- local function checked(advance,where)
+
+ local function checked(advance,where,locked)
local total = skip + height + depth + advance
local delta = total - target
local state = "same"
@@ -311,115 +442,226 @@ local function setsplit(specification) -- a rather large function
end
end
if trace_detail then
- report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)",
+ report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p => %a (height %p, depth %p, skip %p)",
where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip)
end
return state, skipped
end
+
current, skipped = discardtopglue(current,discarded)
if trace_detail and skipped ~= 0 then
report_state("check > column 1, discarded %p",skipped)
end
+
+    -- problem: when we cannot break after a list (and we can only expect same-page situations as we don't
+    -- care too much about weighted breaks here) we should sort of look ahead or otherwise be able to push
+    -- back inserts and so on
+    --
+    -- ok, we could use vsplit but we don't have that one opened up yet .. maybe i should look into the c-code
+    -- .. something that i try to avoid so let's experiment more before we enter dirty trick mode
+
head = current
- while current do
- local id = current.id
- local nxt = current.next
-local lastcolumn = column
- if id == hlist_code or id == vlist_code then
- line = line + 1
- local nxtid = nxt and nxt.id
- local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
- local advance = current.height -- + current.depth
- if nxt and (nxtid == insert_code or nxtid == mark_code) then
- nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid)
- end
- local state, skipped = checked(advance+inserttotal+currentskips,"line")
+
+ local function process_skip(current,nxt)
+ local advance = getfield(getfield(current,"spec"),"width")
+ if advance ~= 0 then
+ local state, skipped = checked(advance,"glue")
if trace_state then
- report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height)
+ report_state("%-7s > column %s, state %a, advance %p, height %p","glue",column,state,advance,height)
if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","line",column,skipped)
+ report_state("%-7s > column %s, discarded %p","glue",column,skipped)
end
end
if state == "quit" then
- break
- else
- height = height + depth + skip + advance + inserttotal
- if state == "next" then
- height = height + nextskips
- else
- height = height + currentskips
- end
+ return true
end
- depth = current.depth
- skip = 0
- if inserts then
- appendinserts(result.inserts,inserts)
+ height = height + depth + skip
+ depth = 0
+ skip = height > 0 and advance or 0
+ if trace_state then
+ report_state("%-7s > column %s, height %p, depth %p, skip %p","glue",column,height,depth,skip)
end
- elseif id == glue_code then
- local advance = current.spec.width
- if advance ~= 0 then
- local state, skipped = checked(advance,"glue")
- if trace_state then
- report_state("%-7s > column %s, state %a, advance %p, height %p","glue",column,state,advance,height)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","glue",column,skipped)
- end
+ else
+ -- what else? ignore? treat as valid as usual?
+ end
+ if lastcontent then
+ unlock()
+ end
+ end
+
+ local function process_kern(current,nxt)
+ local advance = getfield(current,"kern")
+ if advance ~= 0 then
+ local state, skipped = checked(advance,"kern")
+ if trace_state then
+            report_state("%-7s > column %s, state %a, advance %p, height %p","kern",column,state,advance,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","kern",column,skipped)
end
- if state == "quit" then
- break
+ end
+ if state == "quit" then
+ return true
+ end
+ height = height + depth + skip + advance
+ depth = 0
+ skip = 0
+ if trace_state then
+ report_state("%-7s > column %s, height %p, depth %p, skip %p","kern",column,height,depth,skip)
+ end
+ end
+ end
+
+ local function process_rule(current,nxt)
+ -- simple variant of h|vlist
+ local advance = getfield(current,"height") -- + getfield(current,"depth")
+ if advance ~= 0 then
+ local state, skipped = checked(advance,"rule")
+ if trace_state then
+            report_state("%-7s > column %s, state %a, advance %p, height %p","rule",column,state,advance,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","rule",column,skipped)
end
- height = height + depth + skip
- depth = 0
- skip = height > 0 and advance or 0
end
- elseif id == kern_code then
- local advance = current.kern
- if advance ~= 0 then
- local state, skipped = checked(advance,"kern")
+ if state == "quit" then
+ return true
+ end
+ height = height + depth + skip + advance
+ -- if state == "next" then
+ -- height = height + nextskips
+ -- else
+ -- height = height + currentskips
+ -- end
+ depth = getfield(current,"depth")
+ skip = 0
+ end
+ lastcontent = current
+ end
+
+    -- okay, here we could do some badness-like magic but we want something
+ -- predictable and even better: strategies .. so eventually this will
+ -- become installable
+ --
+ -- [chapter] [penalty] [section] [penalty] [first line]
+
+ local function process_penalty(current,nxt)
+ local penalty = getfield(current,"penalty")
+ if penalty == 0 then
+ unlock(penalty)
+ elseif penalty == forcedbreak then
+ local needed = getattribute(current,a_checkedbreak)
+ local proceed = not needed or needed == 0
+ if not proceed then
+ local available = target - height
+ proceed = needed >= available
if trace_state then
- report_state("%-7s > column %s, state %a, advance %p, height %p, state %a","kern",column,state,advance,height)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","kern",column,skipped)
- end
- end
- if state == "quit" then
- break
+ report_state("cycle: %s, column %s, available %p, needed %p, %s break",cycle,column,available,needed,proceed and "forcing" or "ignoring")
end
- height = height + depth + skip + advance
- depth = 0
- skip = 0
end
- elseif id == penalty_code then
- local penalty = current.penalty
- if penalty == 0 then
- -- don't bother
- elseif penalty == forcedbreak then
+ if proceed then
+ unlock(penalty)
local okay, skipped = gotonext()
if okay then
if trace_state then
- report_state("cycle: %s, forced column break (same page)",cycle)
+ report_state("cycle: %s, forced column break, same page",cycle)
if skipped ~= 0 then
report_state("%-7s > column %s, discarded %p","penalty",column,skipped)
end
end
else
if trace_state then
- report_state("cycle: %s, forced column break (next page)",cycle)
+ report_state("cycle: %s, forced column break, next page",cycle)
if skipped ~= 0 then
report_state("%-7s > column %s, discarded %p","penalty",column,skipped)
end
end
- break
+ return true
end
- else
- -- todo: nobreak etc ... we might need to backtrack so we need to remember
- -- the last acceptable break
- -- club and widow and such i.e. resulting penalties (if we care)
end
+ elseif penalty < 0 then
+ -- we don't care too much
+ unlock(penalty)
+ elseif penalty >= 10000 then
+ if not lastcurrent then
+ lock(penalty,current)
+ elseif penalty > lastlocked then
+ lock(penalty)
+ end
+ else
+ unlock(penalty)
end
-if lastcolumn == column then
- nxt = current.next -- can have changed
-end
+ end
+
+ local function process_list(current,nxt)
+ local nxtid = nxt and getid(nxt)
+ line = line + 1
+ local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
+ local advance = getfield(current,"height") -- + getfield(current,"depth")
+ if trace_state then
+ report_state("%-7s > column %s, content: %s","line",column,listtoutf(getlist(current),true,true))
+ end
+ if nxt and (nxtid == insert_code or nxtid == mark_code) then
+ nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid)
+ end
+ local state, skipped = checked(advance+inserttotal+currentskips,"line",lastlocked)
+ if trace_state then
+ report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","line",column,skipped)
+ end
+ end
+ if state == "quit" then
+ return true
+ end
+ height = height + depth + skip + advance + inserttotal
+ if state == "next" then
+ height = height + nextskips
+ else
+ height = height + currentskips
+ end
+ depth = getfield(current,"depth")
+ skip = 0
+ if inserts then
+ -- so we already collect them ... makes backtracking tricky ... alternatively
+ -- we can do that in a separate loop ... no big deal either
+ appendinserts(result.inserts,inserts)
+ end
+ if trace_state then
+ report_state("%-7s > column %s, height %p, depth %p, skip %p","line",column,height,depth,skip)
+ end
+ lastcontent = current
+ end
+
+local kept = head
+
+ while current do
+
+ local id = getid(current)
+ local nxt = getnext(current)
+
+ backtracked = false
+
+ -- print("process",nodetostring(current))
+
+ if id == hlist_code or id == vlist_code then
+ if process_list(current,nxt) then break end
+ elseif id == glue_code then
+ if process_skip(current,nxt) then break end
+ elseif id == kern_code then
+ if process_kern(current,nxt) then break end
+ elseif id == penalty_code then
+ if process_penalty(current,nxt) then break end
+ elseif id == rule_code then
+ if process_rule(current,nxt) then break end
+ else
+ end
+
+ if backtracked then
+ -- print("pickup",nodetostring(current))
+ nxt = current
+ else
+ -- print("move on",nodetostring(current))
+ end
+
if nxt then
current = nxt
elseif head == lasthead then
@@ -437,16 +679,19 @@ end
break
end
end
+
if not current then
if trace_state then
- report_state("nilling rest")
+ report_state("nothing left")
end
- rest = nil
- elseif rest == lasthead then
+ -- needs well defined case
+ -- rest = nil
+ elseif rest == lasthead then
if trace_state then
- report_state("nilling rest as rest is lasthead")
+ report_state("rest equals lasthead")
end
- rest = nil
+ -- test case: x\index{AB} \index{AA}x \blank \placeindex
+ -- makes line disappear: rest = nil
end
if stripbottom then
@@ -466,7 +711,7 @@ end
specification.overflow = overflow
specification.discarded = discarded
- texbox[specification.box].head = nil
+ setfield(getbox(specification.box),"list",nil)
return specification
end
@@ -478,12 +723,12 @@ function mixedcolumns.finalize(result)
local r = results[i]
local h = r.head
if h then
- h.prev = nil
+ setfield(h,"prev",nil)
local t = r.tail
if t then
- t.next = nil
+ setfield(t,"next",nil)
else
- h.next = nil
+ setfield(h,"next",nil)
r.tail = h
end
for c, list in next, r.inserts do
@@ -492,13 +737,13 @@ function mixedcolumns.finalize(result)
local l = list[i]
local h = new_hlist()
t[i] = h
- h.head = l.head
- h.height = l.height
- h.depth = l.depth
- l.head = nil
+ setfield(h,"list",getfield(l,"head"))
+ setfield(h,"height",getfield(l,"height"))
+ setfield(h,"depth",getfield(l,"depth"))
+ setfield(l,"head",nil)
end
- t[1].prev = nil -- needs checking
- t[#t].next = nil -- needs checking
+ setfield(t[1],"prev",nil) -- needs checking
+ setfield(t[#t],"next",nil) -- needs checking
r.inserts[c] = t
end
end
@@ -556,9 +801,6 @@ function mixedcolumns.setsplit(specification)
end
end
-local topskip_code = gluecodes.topskip
-local baselineskip_code = gluecodes.baselineskip
-
function mixedcolumns.getsplit(result,n)
if not result then
report_state("flush, column %s, no result",n)
@@ -573,13 +815,13 @@ function mixedcolumns.getsplit(result,n)
return new_glue(result.originalwidth)
end
- h.prev = nil -- move up
+ setfield(h,"prev",nil) -- move up
local strutht = result.strutht
local strutdp = result.strutdp
local lineheight = strutht + strutdp
local v = new_vlist()
- v.head = h
+ setfield(v,"list",h)
-- local v = vpack(h,"exactly",height)
@@ -601,23 +843,24 @@ function mixedcolumns.getsplit(result,n)
dp = result.depth
end
- v.width = wd
- v.height = ht
- v.depth = dp
+ setfield(v,"width",wd)
+ setfield(v,"height",ht)
+ setfield(v,"depth",dp)
if trace_state then
- local id = h.id
+ local id = getid(h)
if id == hlist_code then
- report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list))
+ report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",listtoutf(getlist(h)))
else
report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id])
end
end
for c, list in next, r.inserts do
- -- tex.setbox("global",c,vpack(nodes.concat(list)))
- -- tex.setbox(c,vpack(nodes.concat(list)))
- texbox[c] = vpack(nodes.concat(list))
+ local l = concatnodes(list)
+ local b = vpack(l) -- multiple arguments, todo: fastvpack
+ -- setbox("global",c,b)
+ setbox(c,b)
r.inserts[c] = nil
end
@@ -661,7 +904,7 @@ end
function commands.mixgetsplit(n)
if result then
- context(mixedcolumns.getsplit(result,n))
+ context(tonode(mixedcolumns.getsplit(result,n)))
end
end
@@ -673,13 +916,13 @@ end
function commands.mixflushrest()
if result then
- context(mixedcolumns.getrest(result))
+ context(tonode(mixedcolumns.getrest(result)))
end
end
function commands.mixflushlist()
if result then
- context(mixedcolumns.getlist(result))
+ context(tonode(mixedcolumns.getlist(result)))
end
end
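
The interesting part of the page-mix.lua rewrite above is the lock/backtrack pair: a penalty of 10000 or more locks the current position (process_penalty refuses to break right after it), and when the column later overflows, gotonext backtracks to the last spot where a break was still acceptable. A minimal standalone sketch of that idea in plain Lua (toy records and made-up names, not the actual node handling):

    -- each item is a line height plus an optional penalty; a penalty >= 10000
    -- attached to a line means "do not break after this line"
    local function fill(lines, target)
        local used, lastgood = 0, 0
        for i = 1, #lines do
            local line = lines[i]
            used = used + line.height
            if used > target then
                return lastgood -- overfull: back up to the last acceptable break
            end
            if not (line.penalty and line.penalty >= 10000) then
                lastgood = i -- breaking after this line is acceptable
            end
        end
        return #lines
    end

    local lines = {
        { height = 12 }, { height = 12 },
        { height = 12, penalty = 10000 }, -- a heading kept with the next line
        { height = 12 },
    }
    print(fill(lines, 40)) -- 2: the heading moves to the next column

The real backtrack helper works on the node list itself and first skips glue and penalties before stepping back over the content.
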
diff --git a/Master/texmf-dist/tex/context/base/page-mix.mkiv b/Master/texmf-dist/tex/context/base/page-mix.mkiv
index abdcced1f0d..41897f6ddc7 100644
--- a/Master/texmf-dist/tex/context/base/page-mix.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-mix.mkiv
@@ -28,6 +28,7 @@
% top and bottom inserts
% wide floats
% move floats
+% offsets (inner ones, so we change the hsize ... needed with backgrounds)
% luatex buglet:
%
@@ -55,7 +56,11 @@
\setupmixedcolumns
[\c!distance=1.5\bodyfontsize,
\c!n=\plustwo,
- %\c!rule=\v!none,
+ %\c!align=, % inherit
+ %\c!before=,
+ %\c!after=,
+ %\c!separator=\v!none,
+ %\c!setups=,
\c!frame=\v!off,
\c!strut=\v!no,
\c!offset=\v!overlay,
@@ -64,12 +69,13 @@
\c!maxwidth=\makeupwidth,
\c!grid=\v!tolerant,
\c!step=.25\lineheight, % needs some experimenting
+ %\c!splitmethod=\v!fixed, % will be default
\c!method=\ifinner\s!box\else\s!otr\fi] % automatic as suggested by WS
\let\startmixedcolumns\relax % defined later
\let\stopmixedcolumns \relax % defined later
-\appendtoks
+\appendtoks % could become an option
\setuevalue{\e!start\currentmixedcolumns}{\startmixedcolumns[\currentmixedcolumns]}%
\setuevalue{\e!stop \currentmixedcolumns}{\stopmixedcolumns}%
\to \everydefinemixedcolumns
@@ -158,7 +164,8 @@
\definemixedcolumns
[\s!itemgroupcolumns]
[\c!n=\itemgroupparameter\c!n,
- \c!rule=\v!off,
+ \c!separator=\v!none,
+ \c!splitmethod=\v!none,
\c!balance=\v!yes]
\unexpanded\def\strc_itemgroups_start_columns
@@ -219,7 +226,7 @@
\unexpanded\def\page_mix_command_set_hsize
{\hsize\d_page_mix_column_width
- \textwidth\d_page_mul_used_width} % needs thinking ... grouping etc
+ \textwidth\d_page_mix_column_width}
%D When setting the vsize we make sure that we collect a few more lines than needed
%D so that we have enough to split over the columns. Collecting too much is somewhat
@@ -249,7 +256,8 @@
{\goodbreak}
\installcolumnbreakmethod \s!mixedcolumn \v!yes
- {\penalty\c_page_mix_break_forced\relax}
+ {\par
+ \penalty\c_page_mix_break_forced\relax}
%D As we operate in grid snapping mode, we use a dedicated macro to enable this
 %D mechanism.
@@ -289,7 +297,7 @@
\unexpanded\def\page_mix_command_inject_separator
{\bgroup
\hss
- \csname\??mixedcolumnsseparator\mixedcolumnsparameter\c!separator\endcsname
+ \csname\??mixedcolumnsseparator\mixedcolumnsparameter\c!separator\endcsname % was \c!rule
\hss
\egroup}
@@ -478,12 +486,23 @@
%
\page_mix_command_set_vsize
\page_mix_command_set_hsize
- \fi}
+ \fi
+ \usealignparameter\mixedcolumnsparameter
+ \usesetupsparameter\mixedcolumnsparameter}
+
+% \setvalue{\??mixedcolumnsstop\s!otr}%
+% {\par
+% \ifcase\c_page_mix_otr_nesting\or
+% \c_page_mix_routine\c_page_mix_routine_balance
+% \page_otr_trigger_output_routine
+% \fi}
\setvalue{\??mixedcolumnsstop\s!otr}%
{\par
\ifcase\c_page_mix_otr_nesting\or
- \c_page_mix_routine\c_page_mix_routine_balance
+ \doifelse{\mixedcolumnsparameter\c!balance}\v!yes
+ {\c_page_mix_routine\c_page_mix_routine_balance}%
+ {\penalty-\plustenthousand}% weird hack, we need to trigger the otr sometimes (new per 20140306, see balancing-001.tex)
\page_otr_trigger_output_routine
\fi}
@@ -500,7 +519,8 @@
%D footnotes. Eventually we will have multiple strategies available.
\unexpanded\def\page_mix_routine_construct#1%
- {\ctxcommand{mixsetsplit {
+ {\d_page_mix_max_height\mixedcolumnsparameter\c!maxheight % can have changed due to header=high
+ \ctxcommand{mixsetsplit {
box = \number\b_page_mix_collected,
nofcolumns = \number\c_page_mix_n_of_columns,
maxheight = \number\d_page_mix_max_height,
@@ -511,6 +531,7 @@
strutht = \number\strutht,
strutdp = \number\strutdp,
threshold = \number\d_page_mix_threshold,
+ splitmethod = "\mixedcolumnsparameter\c!splitmethod",
balance = "#1",
alternative = "\mixedcolumnsparameter\c!alternative",
grid = \ifgridsnapping true\else false\fi,
@@ -521,16 +542,22 @@
{\ctxcommand{mixfinalize()}%
\setbox\b_page_mix_collected\vbox \bgroup
\ifvoid\b_page_mix_preceding \else
+ \page_postprocessors_linenumbers_deepbox\b_page_mix_preceding
\box\b_page_mix_preceding
\global\d_page_mix_preceding_height\zeropoint
\nointerlineskip
\fi
\hskip\d_page_mix_leftskip
\page_mix_hbox to \d_page_mix_max_width \bgroup
- \letmixedcolumnsparameter\c!strut\v!no
- % maybe use \c_page_mix_used_of_columns
- \dorecurse\c_page_mix_n_of_columns {%
- \inheritedmixedcolumnsframed{\page_mix_command_package_column}%
+ \dorecurse\c_page_mix_n_of_columns{%
+ % needs packaging anyway
+ \setbox\scratchbox\page_mix_command_package_column
+ \page_marks_synchronize_column\plusone\c_page_mix_n_of_columns\recurselevel\scratchbox
+ % for the moment a quick and dirty patch .. we need to go into the box (hence the \plusone) .. a slowdowner
+ \page_lines_add_numbers_to_box\scratchbox\recurselevel\c_page_mix_n_of_columns\plusone
+ % the framed needs a reset of strut, align, setups etc
+ \inheritedmixedcolumnsframedbox\currentmixedcolumns\scratchbox
+ % optional
\ifnum\recurselevel<\c_page_mix_n_of_columns
\page_mix_command_inject_separator
\fi
@@ -717,7 +744,7 @@
% \global\setfalse\c_page_floats_room
% \else
\global\settrue\c_page_floats_room
- \fi
+ \fi\fi
\ifdim\floatwidth>\hsize
\showmessage\m!columns{11}\empty
\global\setfalse\c_page_floats_room
@@ -759,6 +786,21 @@
\unexpanded\def\page_mix_command_next_page_and_inserts
{\page_otr_eject_page_and_flush_inserts}
+%D Moved here and dedicated:
+
+\unexpanded\def\page_mix_command_test_column
+ {\dodoubleempty\page_mix_command_test_column_indeed}
+
+\unexpanded\def\page_mix_command_test_column_indeed[#1][#2]% works on last column
+ {\par
+ \begingroup
+ \scratchdimen\dimexpr#1\lineheight\ifsecondargument+#2\fi\relax
+ \ifdim\scratchdimen>\zeropoint
+ \attribute\checkedbreakattribute\number\scratchdimen
+ \penalty\c_page_mix_break_forced\relax
+ \fi
+ \endgroup}
+
%D We need to hook some handlers into the output routine and we define
%D a dedicated one:
@@ -782,8 +824,9 @@
\s!page_otr_command_side_float_output =\page_mix_command_side_float_output,
\s!page_otr_command_flush_floats =\page_mix_command_flush_floats,
\s!page_otr_command_flush_side_floats =\page_mix_command_flush_side_floats,
- \s!page_otr_command_flush_saved_floats =\page_mix_command_flush_saved_floats
+ \s!page_otr_command_flush_saved_floats =\page_mix_command_flush_saved_floats,
% \s!page_otr_command_flush_margin_blocks =\page_mix_command_flush_margin_blocks, % not used
+ \s!page_otr_command_test_column =\page_mix_command_test_column
]
%D Only a few float placement options are supported:
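
The \page_mix_command_test_column macro defined above stores the requested amount of space in the checkedbreak attribute and then emits the forced-break penalty; on the Lua side, process_penalty only honours such a break when that space no longer fits in the current column. A sketch of that decision, with plain numbers standing in for scaled points and a made-up function name:

    -- needed comes from the checkedbreak attribute, target/height from the split state
    local function proceedwithbreak(needed, target, height)
        if not needed or needed == 0 then
            return true -- unconditional forced break
        end
        return needed >= target - height -- break only when the requested room is not left
    end

    print(proceedwithbreak(nil, 600, 500)) -- true
    print(proceedwithbreak( 50, 600, 500)) -- false: 50 units still fit, the break is ignored
    print(proceedwithbreak(150, 600, 500)) -- true: not enough room left, the break is forced
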
diff --git a/Master/texmf-dist/tex/context/base/page-mrk.mkiv b/Master/texmf-dist/tex/context/base/page-mrk.mkiv
index 5f8d332c56f..6d0a5af943a 100644
--- a/Master/texmf-dist/tex/context/base/page-mrk.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-mrk.mkiv
@@ -173,6 +173,7 @@
\settrue\c_page_marks_add_more_number}
\appendtoks
+ \setfalse\c_page_marks_add_page_lines
\setfalse\c_page_marks_add_more_color
\setfalse\c_page_marks_add_more_marking
\setfalse\c_page_marks_add_more_lines
diff --git a/Master/texmf-dist/tex/context/base/page-mul.mkiv b/Master/texmf-dist/tex/context/base/page-mul.mkiv
index 9e0861af93c..0063b3311c5 100644
--- a/Master/texmf-dist/tex/context/base/page-mul.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-mul.mkiv
@@ -960,7 +960,7 @@
\ifnum\c_page_mul_balance_tries>\c_page_mul_balance_tries_max\relax
\showmessage\m!columns7\empty
\else
- \showmessage\m!columns8{\the\c_page_mul_balance_tries\space}%
+ \showmessage\m!columns8{\the\c_page_mul_balance_tries}%
\fi
\egroup}
@@ -1158,26 +1158,28 @@
\def\setlocalcolumnfloats
{\settrue\onlylocalcolumnfloats
\everypar\everylocalcolumnfloatspar
- \let\page_mul_flush_float\doflushcolumnfloat
+ \let\page_mul_flush_float \doflushcolumnfloat
\let\page_mul_flush_floats\doflushcolumnfloats}
\def\setglobalcolumnfloats
{\setfalse\onlylocalcolumnfloats
\reseteverypar
- \let\page_mul_flush_float\relax
+ \let\page_mul_flush_float \relax
\let\page_mul_flush_floats\noflushcolumnfloats}
- \def\noflushcolumnfloats
- {\bgroup
- \xdef\localsavednoffloats{\the\savednoffloats}%
- \global\savednoffloats\globalsavednoffloats
- \page_otr_command_flush_top_insertions
- \xdef\globalsavenoffloats{\the\savednoffloats}%
- \ifnum\globalsavednoffloats=\zerocount
- \setlocalcolumnfloats
- \fi
- \global\savednoffloats\localsavednoffloats
- \egroup}
+ % \def\noflushcolumnfloats
+ % {\bgroup
+ % \xdef\localsavednoffloats{\the\savednoffloats}%
+ % \global\savednoffloats\globalsavednoffloats
+ % \page_otr_command_flush_top_insertions
+ % \xdef\globalsavenoffloats{\the\savednoffloats}%
+ % \ifnum\globalsavednoffloats=\zerocount
+ % \setlocalcolumnfloats
+ % \fi
+ % \global\savednoffloats\localsavednoffloats
+ % \egroup}
+ %
+ \def\noflushcolumnfloats{\doflushcolumnfloats} % not yet redone
%D We need to calculate the amount of free space in a column. When there is not
%D enough room, we migrate the float to the next column. These macros are
@@ -1603,9 +1605,11 @@
\else
\balancecolumnsfalse
\fi
- \installalign\v!yes {\page_columns_align_option_yes }%
- \installalign\v!no {\page_columns_align_option_no }%
- \installalign\v!text{\page_columns_align_option_text}%
+ % % this won't work (blocked by check for overloading; too fuzzy anyway)
+ % \installalign\v!yes {\page_columns_align_option_yes }% \stretchcolumnstrue \inheritcolumnsfalse
+ % \installalign\v!no {\page_columns_align_option_no }% \stretchcolumnsfalse\inheritcolumnsfalse
+ % \installalign\v!text{\page_columns_align_option_text}% \stretchcolumnsfalse\inheritcolumnstrue
+ % %
\stretchcolumnsfalse
\inheritcolumnstrue
\edef\p_align{\columnsparameter\c!align}%
@@ -1763,6 +1767,32 @@
{\directdummyparameter\c!distance}%
{\directdummyparameter\c!n}}
+%D Moved here:
+
+\unexpanded\def\page_mul_command_test_column
+ {\dodoubleempty\page_mul_command_test_column_indeed}
+
+\unexpanded\def\page_mul_command_test_column_indeed[#1][#2]% works on last column
+ {\page_otr_command_flush_top_insertions\endgraf
+ \ifdim\pagegoal<\maxdimen
+ \ifdim\pagetotal<\pagegoal
+ \d_page_tests_test\dimexpr
+ \pagegoal
+ -\pagetotal
+ \ifdim\lastskip<\parskip+\parskip\fi
+ \ifsecondargument+#2\fi
+ \relax
+ \getrawnoflines\d_page_tests_test % (raw)
+ \ifnum#1>\noflines
+ \column
+ \fi
+ \else
+ \penalty-\plustenthousand % (untested)
+ \fi
+ \fi}
+
+%D but fragile anyway.
+
\let\page_mul_command_package_contents\page_one_command_package_contents
\let\page_mul_command_flush_float_box \page_one_command_flush_float_box
@@ -1786,8 +1816,9 @@
\s!page_otr_command_side_float_output =\page_mul_command_side_float_output,
\s!page_otr_command_flush_floats =\page_mul_command_flush_floats,
\s!page_otr_command_flush_side_floats =\page_mul_command_flush_side_floats,
- \s!page_otr_command_flush_saved_floats =\page_mul_command_flush_saved_floats
+ \s!page_otr_command_flush_saved_floats =\page_mul_command_flush_saved_floats,
% \s!page_otr_command_flush_margin_blocks =\page_mul_command_flush_margin_blocks, % not used
+ \s!page_otr_command_test_column =\page_mul_command_test_column
]
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/page-one.mkiv b/Master/texmf-dist/tex/context/base/page-one.mkiv
index 6261938b6e8..3f9dcd7c617 100644
--- a/Master/texmf-dist/tex/context/base/page-one.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-one.mkiv
@@ -1,4 +1,4 @@
-%D \module
+ %D \module
%D [ file=page-one,
%D version=2000.10.20,
%D title=\CONTEXT\ Page Macros,
@@ -587,6 +587,9 @@
\unexpanded\def\page_one_command_synchronize_side_floats
{\page_sides_synchronize_floats}
+\unexpanded\def\page_one_command_test_page
+ {\testpage}
+
\defineoutputroutine
[\s!singlecolumn]
[\s!page_otr_command_routine =\page_one_command_routine,
@@ -608,7 +611,8 @@
\s!page_otr_command_flush_floats =\page_one_command_flush_floats,
\s!page_otr_command_flush_side_floats =\page_one_command_flush_side_floats,
\s!page_otr_command_flush_saved_floats =\page_one_command_flush_saved_floats,
- \s!page_otr_command_flush_margin_blocks =\page_one_command_flush_margin_blocks
+ \s!page_otr_command_flush_margin_blocks =\page_one_command_flush_margin_blocks,
+ \s!page_otr_command_test_column =\page_one_command_test_page
]
% \setupoutputroutine
diff --git a/Master/texmf-dist/tex/context/base/page-otr.mkvi b/Master/texmf-dist/tex/context/base/page-otr.mkvi
index e5433c866fa..e146d99b7ba 100644
--- a/Master/texmf-dist/tex/context/base/page-otr.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-otr.mkvi
@@ -256,6 +256,7 @@
\definesystemconstant{page_otr_command_flush_side_floats}
\definesystemconstant{page_otr_command_flush_saved_floats}
\definesystemconstant{page_otr_command_flush_margin_blocks}
+\definesystemconstant{page_otr_command_test_column}
\definesystemconstant{singlecolumn}
\definesystemconstant{multicolumn} % will move
@@ -281,7 +282,8 @@
\s!page_otr_command_flush_floats,
\s!page_otr_command_flush_side_floats,
\s!page_otr_command_flush_saved_floats,
- \s!page_otr_command_flush_margin_blocks]
+ \s!page_otr_command_flush_margin_blocks,
+ \s!page_otr_command_test_column]
\appendtoks
\setupoutputroutine[\s!singlecolumn]%
diff --git a/Master/texmf-dist/tex/context/base/page-pst.lua b/Master/texmf-dist/tex/context/base/page-pst.lua
index 8586830cf24..50580ae3316 100644
--- a/Master/texmf-dist/tex/context/base/page-pst.lua
+++ b/Master/texmf-dist/tex/context/base/page-pst.lua
@@ -8,15 +8,23 @@ if not modules then modules = { } end modules ['page-pst'] = {
-- todo: adapt message
+local tonumber, next = tonumber, next
local format, validstring = string.format, string.valid
local sortedkeys = table.sortedkeys
+local context = context
+local commands = commands
+
+local texgetcount = tex.getcount
+local texsetcount = tex.setcount
+
local cache = { }
local function flush(page)
local c = cache[page]
if c then
for i=1,#c do
+ -- characters.showstring(c[i])
context.viafile(c[i],format("page.%s",validstring(page,"nopage")))
end
cache[page] = nil
@@ -32,14 +40,14 @@ local function setnextpage()
elseif n > 0 then
-- upcoming page (realpageno)
end
- tex.setcount("global","c_page_postponed_blocks_next_page",n)
+ texsetcount("global","c_page_postponed_blocks_next_page",n)
end
function commands.flushpostponedblocks(page)
-- we need to flush previously pending pages as well and the zero
-- slot is the generic one so that one is always flushed
local t = sortedkeys(cache)
- local p = tonumber(page) or tex.count.realpageno or 0
+ local p = tonumber(page) or texgetcount("realpageno") or 0
for i=1,#t do
local ti = t[i]
if ti <= p then
@@ -54,7 +62,7 @@ end
function commands.registerpostponedblock(page)
if type(page) == "string" then
if string.find(page,"^+") then
- page = tex.count.realpageno + (tonumber(page) or 1) -- future delta page
+ page = texgetcount("realpageno") + (tonumber(page) or 1) -- future delta page
else
page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion
end
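
For the record, the "+n" form accepted by registerpostponedblock above resolves to a page relative to the current real page. A small sketch of that resolution, with a hypothetical realpage argument instead of the texgetcount("realpageno") call:

    local function resolvepage(page, realpage)
        if type(page) == "string" then
            if string.find(page, "^%+") then
                return realpage + (tonumber(page) or 1) -- "+2" means two pages ahead
            else
                return tonumber(page) or 0 -- absolute page, 0 means first possible occasion
            end
        end
        return page
    end

    print(resolvepage("+2", 15)) -- 17
    print(resolvepage("18", 15)) -- 18
    print(resolvepage("",   15)) -- 0
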
diff --git a/Master/texmf-dist/tex/context/base/page-pst.mkiv b/Master/texmf-dist/tex/context/base/page-pst.mkiv
index 7f8a39ca6b4..704289246a3 100644
--- a/Master/texmf-dist/tex/context/base/page-pst.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-pst.mkiv
@@ -78,6 +78,7 @@
%\flushrestfloats
\page_floats_flush_page_floats
\setnormalcatcodes % postponing in verbatim
+ \uncatcodespacetokens % postponing in startlines
\restoreglobalbodyfont % otherwise problems inside split verbatim
\ctxcommand{flushpostponedblocks()}%
\relax
diff --git a/Master/texmf-dist/tex/context/base/page-run.mkiv b/Master/texmf-dist/tex/context/base/page-run.mkiv
index dabf3725292..1f2551ebc17 100644
--- a/Master/texmf-dist/tex/context/base/page-run.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-run.mkiv
@@ -79,13 +79,27 @@ local function todimen(name,unit,fmt)
return number.todimen(tex.dimen[name],unit,fmt)
end
-function commands.showlayoutvariables(options)
-
- if options == "" then
+local function checkedoptions(options)
+ if type(options) == "table" then
+ return options
+ elseif not options or options == "" then
options = "pt,cm"
end
+ options = utilities.parsers.settings_to_hash(options)
+ local n = 4
+ for k, v in table.sortedhash(options) do
+ local m = tonumber(k)
+ if m then
+ n = m
+ end
+ end
+ options.n = n
+ return options
+end
+
+function commands.showlayoutvariables(options)
- local options = utilities.parsers.settings_to_hash(options)
+ options = checkedoptions(options)
local dimensions = { "pt", "bp", "cm", "mm", "dd", "cc", "pc", "nd", "nc", "sp", "in" }
@@ -215,6 +229,8 @@ end
function commands.showlayout(options)
+ options = checkedoptions(options)
+
if tex.count.textlevel == 0 then
commands.showlayoutvariables(options)
@@ -225,7 +241,7 @@ function commands.showlayout(options)
context.bgroup()
context.showframe()
context.setuplayout { marking = interfaces.variables.on }
- for i=1,4 do
+ for i=1,(options.n or 4) do
commands.showlayoutvariables(options)
context.page()
end
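
The new checkedoptions helper above lets a bare number in the option list select how many sample pages commands.showlayout renders. A sketch of that behaviour, using a minimal stand-in for utilities.parsers.settings_to_hash (the real parser is more general):

    -- stand-in for utilities.parsers.settings_to_hash, only for this illustration
    local function settings_to_hash(str)
        local hash = { }
        for word in string.gmatch(str, "[^, ]+") do
            hash[word] = true
        end
        return hash
    end

    local function checkedoptions(options)
        if type(options) == "table" then
            return options
        elseif not options or options == "" then
            options = "pt,cm"
        end
        options = settings_to_hash(options)
        local n = 4
        for k in pairs(options) do
            local m = tonumber(k)
            if m then
                n = m -- a bare number becomes the number of sample pages
            end
        end
        options.n = n
        return options
    end

    print(checkedoptions("pt,cm,6").n) -- 6
    print(checkedoptions("").n)        -- 4
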
diff --git a/Master/texmf-dist/tex/context/base/page-str.lua b/Master/texmf-dist/tex/context/base/page-str.lua
index f6314657f01..f2ac27cd9c1 100644
--- a/Master/texmf-dist/tex/context/base/page-str.lua
+++ b/Master/texmf-dist/tex/context/base/page-str.lua
@@ -12,18 +12,25 @@ if not modules then modules = { } end modules ['page-str'] = {
local concat, insert, remove = table.concat, table.insert, table.remove
-local find_tail, write_node, free_node, copy_nodelist = node.slide, node.write, node.free, node.copy_list
-local vpack_nodelist, hpack_nodelist = node.vpack, node.hpack
-local texdimen, texbox = tex.dimen, tex.box
-local settings_to_array = utilities.parsers.settings_to_array
-
local nodes, node = nodes, node
-local nodepool = nodes.pool
-local tasks = nodes.tasks
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_kern = nodepool.kern
+local new_glyph = nodepool.glyph
+
+local slide_nodelist = node.slide
+local write_node = node.write
+local free_node = node.free
+local copy_nodelist = node.copy_list
+local vpack_nodelist = node.vpack
+local hpack_nodelist = node.hpack
+
+local settings_to_array = utilities.parsers.settings_to_array
-local new_kern = nodepool.kern
-local new_glyph = nodepool.glyph
+local texgetdimen = tex.getdimen
+local texgetbox = tex.getbox
local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end)
local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end)
@@ -66,7 +73,7 @@ function streams.collect(head,where)
end
local last = dana[#dana]
if last then
- local tail = find_tail(last)
+ local tail = slide_nodelist(last)
tail.next, head.prev = head, tail
elseif last == false then
dana[#dana] = head
@@ -175,7 +182,8 @@ function streams.synchronize(list) -- this is an experiment !
if trace_flushing then
report_streams("slot %s has max height %p and max depth %p",m,height,depth)
end
- local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
+ local strutht = texgetdimen("globalbodyfontstrutheight")
+ local strutdp = texgetdimen("globalbodyfontstrutdepth")
local struthtdp = strutht + strutdp
for i=1,#list do
local name = list[i]
@@ -194,11 +202,11 @@ function streams.synchronize(list) -- this is an experiment !
else
-- this is not yet ok as we also need to keep an eye on vertical spacing
-- so we might need to do some splitting or whatever
- local tail = vbox.list and find_tail(vbox.list)
+ local tail = vbox.list and slide_nodelist(vbox.list)
local n, delta = 0, delta_height -- for tracing
while delta > 0 do
-- we need to add some interline penalties
- local line = copy_nodelist(tex.box.strutbox)
+ local line = copy_nodelist(texgetbox("strutbox"))
line.height, line.depth = strutht, strutdp
if tail then
tail.next, line.prev = line, tail
diff --git a/Master/texmf-dist/tex/context/base/page-str.mkiv b/Master/texmf-dist/tex/context/base/page-str.mkiv
index 200a7137774..a8fab9c6cbb 100644
--- a/Master/texmf-dist/tex/context/base/page-str.mkiv
+++ b/Master/texmf-dist/tex/context/base/page-str.mkiv
@@ -29,8 +29,6 @@
%D
%D Remark: marknotes are gone, at least for a while.
-\writestatus{loading}{ConTeXt Page Macros / Page Streams}
-
\registerctxluafile{page-str}{1.001}
\unprotect
diff --git a/Master/texmf-dist/tex/context/base/page-txt.mkvi b/Master/texmf-dist/tex/context/base/page-txt.mkvi
index 707af25e9bb..6d8d50028d9 100644
--- a/Master/texmf-dist/tex/context/base/page-txt.mkvi
+++ b/Master/texmf-dist/tex/context/base/page-txt.mkvi
@@ -269,7 +269,7 @@
\let\m_page_layouts_element_content\empty
\unexpanded\def\page_layouts_process_element_single#style#color#width#content%
- {\edef\m_page_layouts_element_content{\detokenize{#content}}%
+ {\edef\m_page_layouts_element_content{\detokenize{#content}}% so no \v!xxx
\ifx\m_page_layouts_element_content\empty
% should not happen too often
\else
@@ -440,12 +440,12 @@
\def\page_layouts_set_text_content[#vertical][#horizontal][#one][#two][#three]% header text middle text/text
{\iffifthargument
- \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\c!text:#one}\c!middletext}%
+ \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\v!text:#one}\c!middletext}%
{\page_layouts_process_element_double
\c!leftstyle \c!leftcolor \c!leftwidth {#two}%
\c!rightstyle\c!rightcolor\c!rightwidth{#three}}%
\else\iffourthargument
- \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\c!text:#one}\c!middletext}%
+ \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\v!text:#one}\c!middletext}%
{\page_layouts_process_element_double
\c!leftstyle \c!leftcolor \c!leftwidth {#two}%
\c!rightstyle\c!rightcolor\c!rightwidth{#two}}%
@@ -462,16 +462,16 @@
\def\page_layouts_reset_text_content[#vertical][#horizontal][#tag]% header text middle
{\edef\currentlayoutelement{#vertical:#horizontal}%
\ifthirdargument
- \letvalueempty{\layoutelementhash\executeifdefined{\??layouttextcontent\c!text:#tag}\c!middletext}%
+ \letvalueempty{\layoutelementhash\executeifdefined{\??layouttextcontent\v!text:#tag}\c!middletext}%
\else\ifsecondargument
\resetlayoutelementparameter\c!lefttext
\resetlayoutelementparameter\c!middletext
\resetlayoutelementparameter\c!righttext
\fi\fi}
-\letvalue{\??layouttextcontent\c!middle:\c!text}\c!middletext
-\letvalue{\??layouttextcontent\c!left :\c!text}\c!lefttext
-\letvalue{\??layouttextcontent\c!right :\c!text}\c!righttext
+\letvalue{\??layouttextcontent\c!middle:\v!text}\c!middletext
+\letvalue{\??layouttextcontent\c!left :\v!text}\c!lefttext
+\letvalue{\??layouttextcontent\c!right :\v!text}\c!righttext
%D The placement of a whole line is handled by the next two
%D macros. These are hooked into the general purpose token
diff --git a/Master/texmf-dist/tex/context/base/pdfr-def.mkii b/Master/texmf-dist/tex/context/base/pdfr-def.mkii
index 7554bda9e92..b3f67b93f82 100644
--- a/Master/texmf-dist/tex/context/base/pdfr-def.mkii
+++ b/Master/texmf-dist/tex/context/base/pdfr-def.mkii
@@ -1,4 +1,4 @@
-% filename : pdfr-def.tex
+% filename : pdfr-def.mkii
% comment : generated by mtxrun --script chars --pdf
% author : Hans Hagen, PRAGMA-ADE, Hasselt NL
% copyright: PRAGMA ADE / ConTeXt Development Team
diff --git a/Master/texmf-dist/tex/context/base/phys-dim.lua b/Master/texmf-dist/tex/context/base/phys-dim.lua
index 45a99978dc3..870cbd29b0e 100644
--- a/Master/texmf-dist/tex/context/base/phys-dim.lua
+++ b/Master/texmf-dist/tex/context/base/phys-dim.lua
@@ -39,6 +39,7 @@ if not modules then modules = { } end modules ['phys-dim'] = {
-- RevPerSec = [[RPS]],
-- RevPerMin = [[RPM]],
+local rawset, next = rawset, next
local V, P, S, R, C, Cc, Cs, matchlpeg = lpeg.V, lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.match
local format, lower = string.format, string.lower
local appendlpeg = lpeg.append
@@ -53,6 +54,9 @@ local variables = interfaces.variables
local v_reverse = variables.reverse
local allocate = utilities.storage.allocate
+local context = context
+local commands = commands
+
local trace_units = false
local report_units = logs.reporter("units")
@@ -381,13 +385,15 @@ local long_operators = {
local long_suffixes = {
- Linear = "linear",
- Square = "square",
- Cubic = "cubic",
- Inverse = "inverse",
- ILinear = "ilinear",
- ISquare = "isquare",
- ICubic = "icubic",
+ Linear = "linear",
+ Square = "square",
+ Cubic = "cubic",
+ Quadratic = "quadratic",
+ Inverse = "inverse",
+ ILinear = "ilinear",
+ ISquare = "isquare",
+ ICubic = "icubic",
+ IQuadratic = "iquadratic",
}
@@ -454,23 +460,29 @@ local short_suffixes = { -- maybe just raw digit match
["1"] = "linear",
["2"] = "square",
["3"] = "cubic",
+ ["4"] = "quadratic",
["+1"] = "linear",
["+2"] = "square",
["+3"] = "cubic",
+ ["+4"] = "quadratic",
["-1"] = "inverse",
["-1"] = "ilinear",
["-2"] = "isquare",
["-3"] = "icubic",
+ ["-4"] = "iquadratic",
["^1"] = "linear",
["^2"] = "square",
["^3"] = "cubic",
+ ["^4"] = "quadratic",
["^+1"] = "linear",
["^+2"] = "square",
["^+3"] = "cubic",
+ ["^+4"] = "quadratic",
["^-1"] = "inverse",
["^-1"] = "ilinear",
["^-2"] = "isquare",
["^-3"] = "icubic",
+ ["^-4"] = "iquadratic",
}
local symbol_units = {
@@ -495,20 +507,20 @@ local packaged_units = {
-- rendering:
-local unitsPUS = context.unitsPUS
-local unitsPU = context.unitsPU
-local unitsPS = context.unitsPS
-local unitsP = context.unitsP
-local unitsUS = context.unitsUS
-local unitsU = context.unitsU
-local unitsS = context.unitsS
-local unitsO = context.unitsO
-local unitsN = context.unitsN
-local unitsC = context.unitsC
-local unitsQ = context.unitsQ
-local unitsNstart = context.unitsNstart
-local unitsNstop = context.unitsNstop
-local unitsNspace = context.unitsNspace
+local ctx_unitsPUS = context.unitsPUS
+local ctx_unitsPU = context.unitsPU
+local ctx_unitsPS = context.unitsPS
+local ctx_unitsP = context.unitsP
+local ctx_unitsUS = context.unitsUS
+local ctx_unitsU = context.unitsU
+local ctx_unitsS = context.unitsS
+local ctx_unitsO = context.unitsO
+local ctx_unitsN = context.unitsN
+local ctx_unitsC = context.unitsC
+local ctx_unitsQ = context.unitsQ
+local ctx_unitsNstart = context.unitsNstart
+local ctx_unitsNstop = context.unitsNstop
+local ctx_unitsNspace = context.unitsNspace
local labels = languages.data.labels
@@ -575,7 +587,7 @@ labels.units = allocate {
lumen = { labels = { en = [[lm]] } },
lux = { labels = { en = [[lx]] } },
bequerel = { labels = { en = [[Bq]] } },
- gray = { labels = { en = [[Gr]] } },
+ gray = { labels = { en = [[Gy]] } },
sievert = { labels = { en = [[Sv]] } },
katal = { labels = { en = [[kat]] } },
minute = { labels = { en = [[min]] } },
@@ -635,13 +647,15 @@ labels.operators = allocate {
}
labels.suffixes = allocate {
- linear = { labels = { en = [[1]] } },
- square = { labels = { en = [[2]] } },
- cubic = { labels = { en = [[3]] } },
- inverse = { labels = { en = [[-1]] } },
- ilinear = { labels = { en = [[-1]] } },
- isquare = { labels = { en = [[-2]] } },
- icubic = { labels = { en = [[-3]] } },
+ linear = { labels = { en = [[1]] } },
+ square = { labels = { en = [[2]] } },
+ cubic = { labels = { en = [[3]] } },
+ quadratic = { labels = { en = [[4]] } },
+ inverse = { labels = { en = [[-1]] } },
+ ilinear = { labels = { en = [[-1]] } },
+ isquare = { labels = { en = [[-2]] } },
+ icubic = { labels = { en = [[-3]] } },
+ iquadratic = { labels = { en = [[-4]] } },
}
local function dimpus(p,u,s)
@@ -651,28 +665,28 @@ local function dimpus(p,u,s)
if p ~= "" then
if u ~= "" then
if s ~= "" then
- unitsPUS(p,u,s)
+ ctx_unitsPUS(p,u,s)
else
- unitsPU(p,u)
+ ctx_unitsPU(p,u)
end
elseif s ~= "" then
- unitsPS(p,s)
+ ctx_unitsPS(p,s)
else
- unitsP(p)
+ ctx_unitsP(p)
end
else
if u ~= "" then
if s ~= "" then
- unitsUS(u,s)
+ ctx_unitsUS(u,s)
-- elseif c then
- -- unitsC(u)
+ -- ctx_unitsC(u)
else
- unitsU(u)
+ ctx_unitsU(u)
end
elseif s ~= "" then
- unitsS(s)
+ ctx_unitsS(s)
else
- unitsP(p)
+ ctx_unitsP(p)
end
end
end
@@ -686,7 +700,7 @@ local function dimop(o)
report_units("operator %a",o)
end
if o then
- unitsO(o)
+ ctx_unitsO(o)
end
end
@@ -696,7 +710,7 @@ local function dimsym(s)
end
s = symbol_units[s] or s
if s then
- unitsC(s)
+ ctx_unitsC(s)
end
end
@@ -706,7 +720,7 @@ local function dimpre(p)
end
p = packaged_units[p] or p
if p then
- unitsU(p)
+ ctx_unitsU(p)
end
end
@@ -776,7 +790,7 @@ local function update_parsers() -- todo: don't remap utf sequences
* (V("packaged") / dimpre)
* V("somespace"),
-- someunknown = V("somespace")
- -- * (V("nospace")/unitsU)
+ -- * (V("nospace")/ctx_unitsU)
-- * V("somespace"),
--
combination = V("longprefix") * V("longunit") -- centi meter
@@ -791,7 +805,7 @@ local function update_parsers() -- todo: don't remap utf sequences
+ (V("longsuffix") * V("combination")) / dimspu
+ (V("combination") * (V("shortsuffix") + V("nothing"))) / dimpus
)
- * (V("qualifier") / unitsQ)^-1
+ * (V("qualifier") / ctx_unitsQ)^-1
* V("somespace"),
operator = V("somespace")
* ((V("longoperator") + V("shortoperator")) / dimop)
@@ -811,13 +825,13 @@ local function update_parsers() -- todo: don't remap utf sequences
local number = Cs( P("$") * (1-P("$"))^1 * P("$")
+ P([[\m{]]) * (1-P("}"))^1 * P("}")
+ (1-R("az","AZ")-P(" "))^1 -- todo: catch { } -- not ok
- ) / unitsN
+ ) / ctx_unitsN
- local start = Cc(nil) / unitsNstart
- local stop = Cc(nil) / unitsNstop
- local space = Cc(nil) / unitsNspace
+ local start = Cc(nil) / ctx_unitsNstart
+ local stop = Cc(nil) / ctx_unitsNstop
+ local space = Cc(nil) / ctx_unitsNspace
- -- todo: avoid \unitsNstart\unitsNstop (weird that it can happen .. now catched at tex end)
+    -- todo: avoid \ctx_unitsNstart\ctx_unitsNstop (weird that it can happen .. now caught at tex end)
local p_c_combinedparser = P { "start",
number = start * dleader * (p_c_dparser + number) * stop,
diff --git a/Master/texmf-dist/tex/context/base/publ-aut.lua b/Master/texmf-dist/tex/context/base/publ-aut.lua
new file mode 100644
index 00000000000..ba492a93bd6
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-aut.lua
@@ -0,0 +1,550 @@
+if not modules then modules = { } end modules ['publ-aut'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+end
+
+local context = context
+local chardata = characters.data
+
+local tostring = tostring
+local concat = table.concat
+local lpeg = lpeg
+local utfchar = utf.char
+
+local publications = publications or { }
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+publications.authors = publications.authors or { }
+local authors = publications.authors
+
+local P, C, V, Cs, Ct, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.match, lpeg.patterns
+
+-- local function makesplitter(separator)
+-- return Ct { "start",
+-- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
+-- start = Cs(V("outer")) + (Cs((V("inner") + (1-separator))^1) + separator^1)^1,
+-- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^0) * (P("}")/""),
+-- inner = P("{") * ((V("inner") + P(1-P("}")))^0) * P("}"),
+-- }
+-- end
+
+local space = P(" ")
+local comma = P(",")
+local firstcharacter = lpegpatterns.utf8byte
+
+-- local andsplitter = lpeg.tsplitat(space^1 * "and" * space^1)
+-- local commasplitter = lpeg.tsplitat(space^0 * comma * space^0)
+-- local spacesplitter = lpeg.tsplitat(space^1)
+
+local p_and = space^1 * "and" * space^1
+local p_comma = space^0 * comma * space^0
+local p_space = space^1
+
+local andsplitter = Ct { "start",
+ start = (Cs((V("inner") + (1-p_and))^1) + p_and)^1,
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local commasplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_comma))^1) + p_comma)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local spacesplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_space))^1) + p_space)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local function is_upper(str)
+ local first = lpegmatch(firstcharacter,str)
+ local okay = chardata[first]
+ return okay and okay.category == "lu"
+end
+
+local cache = { } -- 33% reuse on tugboat.bib
+local nofhits = 0
+local nofused = 0
+
+local function splitauthorstring(str)
+ if not str then
+ return
+ end
+ nofused = nofused + 1
+ local authors = cache[str]
+ if authors then
+ -- hit 1
+ -- print("hit 1",author,nofhits,nofused,math.round(100*nofhits/nofused))
+ return { authors } -- we assume one author
+ end
+ local authors = lpegmatch(andsplitter,str)
+ for i=1,#authors do
+ local author = authors[i]
+ local detail = cache[author]
+ if detail then
+ -- hit 2
+ -- print("hit 2",author,nofhits,nofused,math.round(100*nofhits/nofused))
+ end
+ if not detail then
+ local firstnames, vons, surnames, initials, juniors
+ local split = lpegmatch(commasplitter,author)
+-- inspect(split)
+ local n = #split
+ if n == 1 then
+ -- First von Last
+ local words = lpegmatch(spacesplitter,author)
+ firstnames, vons, surnames = { }, { }, { }
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ if i <= n then
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ elseif #vons == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ else
+ -- mess
+ end
+ -- safeguard
+ if #surnames == 0 then
+ firstnames = { }
+ vons = { }
+ surnames = { author }
+ end
+ elseif n == 2 then
+ -- von Last, First
+ firstnames, vons, surnames = { }, { }, { }
+ local words = lpegmatch(spacesplitter,split[1])
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ --
+ local words = lpegmatch(spacesplitter,split[2])
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ vons[#vons+1], i = words[i], i + 1
+ end
+ else
+                -- von Last, Jr, First
+ firstnames = lpegmatch(spacesplitter,split[1])
+ juniors = lpegmatch(spacesplitter,split[2])
+ surnames = lpegmatch(spacesplitter,split[3])
+ if n > 3 then
+ -- error
+ end
+ end
+ if #surnames == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ end
+ if firstnames then
+ initials = { }
+ for i=1,#firstnames do
+ initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i]))
+ end
+ end
+ detail = {
+ original = author,
+ firstnames = firstnames,
+ vons = vons,
+ surnames = surnames,
+ initials = initials,
+ juniors = juniors,
+ }
+ cache[author] = detail
+ nofhits = nofhits + 1
+ end
+ authors[i] = detail
+ end
+ return authors
+end
+
+-- local function splitauthors(dataset,tag,field)
+-- local entries = datasets[dataset]
+-- local luadata = entries.luadata
+-- if not luadata then
+-- return { }
+-- end
+-- local entry = luadata[tag]
+-- if not entry then
+-- return { }
+-- end
+-- return splitauthorstring(entry[field])
+-- end
+
+local function the_initials(initials,symbol)
+ local t, symbol = { }, symbol or "."
+ for i=1,#initials do
+ t[i] = initials[i] .. symbol
+ end
+ return t
+end
+
+-- authors
+
+local settings = { }
+
+-- local defaultsettings = {
+-- firstnamesep = " ",
+-- vonsep = " ",
+-- surnamesep = " ",
+-- juniorsep = " ",
+-- surnamejuniorsep = ", ",
+-- juniorjuniorsep = ", ",
+-- surnamefirstnamesep = ", ",
+-- surnameinitialsep = ", ",
+-- namesep = ", ",
+-- lastnamesep = " and ",
+-- finalnamesep = " and ",
+-- etallimit = 1000,
+-- etaldisplay = 1000,
+-- etaltext = "",
+-- }
+
+local defaultsettings = {
+ firstnamesep = [[\btxlistvariantparameter{firstnamesep}]],
+ vonsep = [[\btxlistvariantparameter{vonsep}]],
+ surnamesep = [[\btxlistvariantparameter{surnamesep}]],
+ juniorsep = [[\btxlistvariantparameter{juniorsep}]],
+ surnamejuniorsep = [[\btxlistvariantparameter{surnamejuniorsep}]],
+ juniorjuniorsep = [[\btxlistvariantparameter{juniorjuniorsep}]],
+ surnamefirstnamesep = [[\btxlistvariantparameter{surnamefirstnamesep}]],
+ surnameinitialsep = [[\btxlistvariantparameter{surnameinitialsep}]],
+ namesep = [[\btxlistvariantparameter{namesep}]],
+ lastnamesep = [[\btxlistvariantparameter{lastnamesep}]],
+ finalnamesep = [[\btxlistvariantparameter{finalnamesep}]],
+ --
+ etaltext = [[\btxlistvariantparameter{etaltext}]],
+ --
+ etallimit = 1000,
+ etaldisplay = 1000,
+}
+
+function authors.setsettings(s)
+end
+
+authors.splitstring = splitauthorstring
+
+-- [firstnames] [firstnamesep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (Taco, von Hoekwater, jr)
+
+function authors.normal(author,settings)
+ local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if firstnames and #firstnames > 0 then
+ result[#result+1] = concat(firstnames," ")
+ result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep
+ end
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ return concat(result)
+end
+
+-- [initials] [initialsep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (T, von Hoekwater, jr)
+
+function authors.normalshort(author,settings)
+ local initials, vons, surnames, juniors = author.initials, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if initials and #initials > 0 then
+ result[#result+1] = concat(initials," ")
+ result[#result+1] = settings.initialsep or defaultsettings.initialsep
+ end
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ return concat(result)
+end
+
+-- vons surnames juniors, firstnames
+
+-- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [firstnames] (von Hoekwater jr, Taco)
+
+function authors.inverted(author,settings)
+ local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ if firstnames and #firstnames > 0 then
+ result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep
+ result[#result+1] = concat(firstnames," ")
+ end
+ return concat(result)
+end
+
+-- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [initials] (von Hoekwater jr, T)
+
+function authors.invertedshort(author,settings)
+ local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ if initials and #initials > 0 then
+ result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep
+ result[#result+1] = concat(the_initials(initials)," ")
+ end
+ return concat(result)
+end
+
+local lastconcatsize = 1
+
+local function concatnames(t,settings)
+ local namesep = settings.namesep
+ local lastnamesep = settings.lastnamesep
+ local finalnamesep = settings.finalnamesep
+ local lastconcatsize = #t
+ if lastconcatsize > 2 then
+ local s = { }
+ for i=1,lastconcatsize-2 do
+ s[i] = t[i] .. namesep
+ end
+ s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize]
+ return concat(s)
+ elseif lastconcatsize > 1 then
+ return concat(t,lastnamesep)
+ elseif lastconcatsize > 0 then
+ return t[1]
+ else
+ return ""
+ end
+end
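+
+-- In other words, for a list of already formatted names (the separators are
+-- whatever the settings provide):
+--
+-- { "A" } -> A
+-- { "A", "B" } -> A .. lastnamesep .. B
+-- { "A", "B", "C" } -> A .. namesep .. B .. finalnamesep .. C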
+
+function authors.concat(dataset,tag,field,settings)
+ table.setmetatableindex(settings,defaultsettings)
+ local combiner = settings.combiner
+ if not combiner or type(combiner) == "string" then
+ combiner = authors[combiner or "normal"] or authors.normal
+ end
+ local split = datasets[dataset].details[tag][field]
+ local etallimit = settings.etallimit or 1000
+ local etaldisplay = settings.etaldisplay or etallimit
+ local max = split and #split or 0
+ if max == 0 then
+ -- error
+ end
+ if max > etallimit and etaldisplay < max then
+ max = etaldisplay
+ end
+ local combined = { }
+ for i=1,max do
+ combined[i] = combiner(split[i],settings)
+ end
+ local result = concatnames(combined,settings)
+ if not split or max >= #split then
+ return result
+ else
+ return result .. settings.etaltext
+ end
+end
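+
+-- A made-up call: when the author list is longer than etallimit only
+-- etaldisplay names are rendered and etaltext is appended:
+--
+-- local s = publications.authors.concat("test","hh2013","author", {
+-- combiner = "invertedshort",
+-- etallimit = 2,
+-- etaldisplay = 1,
+-- etaltext = " et al.",
+-- })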
+
+function commands.btxauthor(...)
+ context(authors.concat(...))
+end
+
+function authors.short(author,year)
+ -- todo
+-- local result = { }
+-- if author then
+-- local authors = splitauthors(author)
+-- for a=1,#authors do
+-- local aa = authors[a]
+-- local initials = aa.initials
+-- for i=1,#initials do
+-- result[#result+1] = initials[i]
+-- end
+-- local surnames = aa.surnames
+-- for s=1,#surnames do
+-- result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s]))
+-- end
+-- end
+-- end
+-- if year then
+-- result[#result+1] = year
+-- end
+-- return concat(result)
+end
+
+-- We can consider creating a hashtable key -> entry but I wonder if it
+-- pays off.
+
+local compare = sorters.comparers.basic -- (a,b)
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
+function authors.preparedsort(dataset,list,sorttype_a,sorttype_b,sorttype_c)
+ local luadata = datasets[dataset].luadata
+ local details = datasets[dataset].details
+ local valid = { }
+ local splitted = { }
+ table.setmetatableindex(splitted,function(t,k) -- could be done in the sorter but seldom that many shared
+ local v = splitter(k,true) -- in other cases
+ t[k] = v
+ return v
+ end)
+ local snippets = { }
+ for i=1,#list do
+ -- either { tag, tag, ... } or { { tag, index }, { tag, index } }
+ local li = list[i]
+ local tag = type(li) == "string" and li or li[1]
+ local entry = luadata[tag]
+ local detail = details[tag]
+ local suffix = tostring(i)
+ local year = nil
+ local assembled = nil
+ if entry and detail then
+ local key = detail[sorttype_a] or detail[sorttype_b] or detail[sorttype_c]
+ if key then
+ -- maybe an option is to also sort the authors first
+ local n = #key
+ local s = 0
+ for i=1,n do
+ local k = key[i]
+ local vons = k.vons
+ local surnames = k.surnames
+ local initials = k.initials
+ if vons and #vons > 0 then
+ s = s + 1 ; snippets[s] = concat(vons," ")
+ end
+ if surnames and #surnames > 0 then
+ s = s + 1 ; snippets[s] = concat(surnames," ")
+ end
+ if initials and #initials > 0 then
+ s = s + 1 ; snippets[s] = concat(initials," ")
+ end
+ end
+ assembled = concat(snippets," ",1,s)
+ else
+ assembled = ""
+ end
+ year = entry.year or "9998"
+ else
+ assembled = ""
+ year = "9999"
+ end
+ valid[i] = {
+ index = i,
+ split = {
+ splitted[strip(assembled)],
+ splitted[year],
+ splitted[suffix],
+ },
+-- names = assembled,
+-- year = year,
+-- suffix = suffix,
+ }
+ end
+ return valid
+end
+
+function authors.sorted(dataset,list,sorttype) -- experimental
+ local valid = authors.preparedsort(dataset,list,sorttype)
+ if #valid == 0 or #valid ~= #list then
+ return list
+ else
+ sorters.sort(valid,compare)
+ for i=1,#valid do
+ valid[i] = valid[i].index
+ end
+ return valid
+ end
+end
+
+-- local dataset = publications.datasets.test
+--
+-- local function add(str)
+-- dataset.details[str] = { author = publications.authors.splitstring(str) }
+-- end
+--
+-- add("Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der")
+-- add("Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut")
+-- add("de Gennes, P. and Gennes, P. de")
+-- add("van't Hoff, J. H. and {van't Hoff}, J. H.")
+--
+-- local list = table.keys(dataset.details)
+-- local sort = publications.authors.sorted("test",list,"author")
+-- local test = { } for i=1,#sort do test[i] = dataset.details[list[sort[i]]] end
diff --git a/Master/texmf-dist/tex/context/base/publ-dat.lua b/Master/texmf-dist/tex/context/base/publ-dat.lua
new file mode 100644
index 00000000000..b463064ca53
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-dat.lua
@@ -0,0 +1,527 @@
+if not modules then modules = { } end modules ['publ-dat'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: strip the @ in the lpeg instead of on do_definition and do_shortcut
+-- todo: store bibroot and bibrootdt
+
+--[[ldx--
+This is a prelude to integrated bibliography support. This file just loads
+bibtex files and converts them to xml so that we can access the content
+in a convenient way. Actually handling the data takes place elsewhere.
+--ldx]]--
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+ dofile(resolvers.findfile("char-tex.lua"))
+end
+
+local chardata = characters.data
+local lowercase = characters.lower
+
+local lower, gsub, concat = string.lower, string.gsub, table.concat
+local next, type = next, type
+local utfchar = utf.char
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local textoutf = characters and characters.tex.toutf
+local settings_to_hash, settings_to_array = utilities.parsers.settings_to_hash, utilities.parsers.settings_to_array
+local formatters = string.formatters
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local xmlcollected, xmltext, xmlconvert = xml.collected, xml.text, xml.convert
+local setmetatableindex = table.setmetatableindex
+
+-- todo: more allocate
+
+local P, R, S, V, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
+
+local trace = false trackers.register("publications", function(v) trace = v end)
+local report = logs.reporter("publications")
+
+publications = publications or { }
+local publications = publications
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+publications.statistics = publications.statistics or { }
+local publicationsstats = publications.statistics
+
+publicationsstats.nofbytes = 0
+publicationsstats.nofdefinitions = 0
+publicationsstats.nofshortcuts = 0
+publicationsstats.nofdatasets = 0
+
+local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
+
+local defaultshortcuts = {
+ jan = "1",
+ feb = "2",
+ mar = "3",
+ apr = "4",
+ may = "5",
+ jun = "6",
+ jul = "7",
+ aug = "8",
+ sep = "9",
+ oct = "10",
+ nov = "11",
+ dec = "12",
+}
+
+function publications.new(name)
+ publicationsstats.nofdatasets = publicationsstats.nofdatasets + 1
+ local dataset = {
+ name = name or "dataset " .. publicationsstats.nofdatasets,
+ nofentries = 0,
+ shortcuts = { },
+ luadata = { },
+ xmldata = xmlconvert(xmlplaceholder),
+ -- details = { },
+ nofbytes = 0,
+ entries = nil, -- empty == all
+ sources = { },
+ loaded = { },
+ fields = { },
+ userdata = { },
+ used = { },
+ commands = { }, -- for statistical purposes
+ status = {
+ resources = false,
+ userdata = false,
+ },
+ }
+ setmetatableindex(dataset,function(t,k)
+ -- will become a plugin
+ if k == "details" and publications.enhance then
+ dataset.details = { }
+ publications.enhance(dataset.name)
+ return dataset.details
+ end
+ end)
+ return dataset
+end
+
+function publications.markasupdated(name)
+ if type(name) == "string" then
+ datasets[name].details = nil
+ else
+ name.details = nil
+ end
+end
+
+setmetatableindex(datasets,function(t,k)
+ if type(k) == "table" then
+ return k -- so we can use this accessor as checker
+ else
+ local v = publications.new(k)
+ datasets[k] = v
+ return v
+ end
+end)
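+
+-- Unknown datasets are created on access, so the following (made-up) lines
+-- are equivalent to an explicit publications.new:
+--
+-- local standard = publications.datasets["standard"]
+-- print(standard.name,standard.nofentries) -- standard 0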
+
+-- we apply some normalization
+
+local space = S(" \t\n\r\f") -- / " "
+
+----- command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
+----- command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+local command = P("\\") * (Carg(1) * C(R("az","AZ")^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
+local any = P(1)
+local done = P(-1)
+local one_l = P("{") / ""
+local one_r = P("}") / ""
+local two_l = P("{{") / ""
+local two_r = P("}}") / ""
+local special = P("#") / "\\letterhash"
+
+local filter_0 = S('\\{}')
+local filter_1 = (1-filter_0)^0 * filter_0
+local filter_2 = Cs(
+-- {{...}} ... {{...}}
+-- two_l * (command + special + any - two_r - done)^0 * two_r * done +
+-- one_l * (command + special + any - one_r - done)^0 * one_r * done +
+ (somemath + command + special + any )^0
+)
+
+-- Currently we expand shortcuts and for large ones (like the acknowledgements
+-- in tugboat.bib) this is not that efficient. However, eventually strings get
+-- hashed again.
+
+local function do_shortcut(key,value,dataset)
+ publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
+ dataset.shortcuts[key] = value
+end
+
+local function getindex(dataset,luadata,tag)
+ local found = luadata[tag]
+ if found then
+ return found.index or 0
+ else
+ local index = dataset.nofentries + 1
+ dataset.nofentries = index
+ return index
+ end
+end
+
+publications.getindex = getindex
+
+-- todo: categories : metatable that lowers and also counts
+-- todo: fields : metatable that lowers
+
+local function do_definition(category,tag,tab,dataset)
+ publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
+ local fields = dataset.fields
+ local luadata = dataset.luadata
+ local found = luadata[tag]
+ local index = getindex(dataset,luadata,tag)
+ local entries = {
+ category = lower(category),
+ tag = tag,
+ index = index,
+ }
+ for i=1,#tab,2 do
+ local original = tab[i]
+ local normalized = fields[original]
+ if not normalized then
+ normalized = lower(original) -- we assume ascii fields
+ fields[original] = normalized
+ end
+ local value = tab[i+1]
+ value = textoutf(value)
+ if lpegmatch(filter_1,value) then
+ value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
+ end
+ if normalized == "crossref" then
+ local parent = luadata[value]
+ if parent then
+ setmetatableindex(entries,parent)
+ else
+ -- warning
+ end
+ end
+ entries[normalized] = value
+ end
+ luadata[tag] = entries
+end
+
+local function resolve(s,dataset)
+ return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
+end
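+
+-- So, given a made-up fragment like
+--
+-- @STRING{ tb = "TUGboat" }
+-- @article{ knuth1, journal = tb, month = jun, year = 1990 }
+--
+-- the unquoted journal and month values resolve to "TUGboat" and "6", while
+-- an unknown shortcut (here the year) just passes through unchanged.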
+
+local percent = P("%")
+local start = P("@")
+local comma = P(",")
+local hash = P("#")
+local escape = P("\\")
+local single = P("'")
+local double = P('"')
+local left = P('{')
+local right = P('}')
+local both = left + right
+local lineending = S("\n\r")
+local space = S(" \t\n\r\f") -- / " "
+local spacing = space^0
+local equal = P("=")
+----- collapsed = (space^1)/ " "
+local collapsed = (lpegpatterns.whitespace^1)/ " "
+
+----- balanced = lpegpatterns.balanced
+local balanced = P {
+ [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right)) + V(2))^0,
+ [2] = left * V(1) * right
+}
+
+local keyword = C((R("az","AZ","09") + S("@_:-"))^1)
+local key = C((1-space-equal)^1)
+local tag = C((1-space-comma)^1)
+local reference = keyword
+local category = P("@") * C((1-space-left)^1)
+local s_quoted = ((escape*single) + collapsed + (1-single))^0
+local d_quoted = ((escape*double) + collapsed + (1-double))^0
+
+local b_value = (left /"") * balanced * (right /"")
+local s_value = (single/"") * (b_value + s_quoted) * (single/"")
+local d_value = (double/"") * (b_value + d_quoted) * (double/"")
+local r_value = reference * Carg(1) /resolve
+
+local somevalue = s_value + d_value + b_value + r_value
+local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
+
+local assignment = spacing * key * spacing * equal * spacing * value * spacing
+local shortcut = P("@") * (P("string") + P("STRING")) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right
+local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition
+local comment = keyword * spacing * left * (1-right)^0 * spacing * right
+local forget = percent^1 * (1-lineending)^0
+
+-- todo \%
+
+local bibtotable = (space + forget + shortcut + definition + comment + 1)^0
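+
+-- As an illustration (made-up entry), loading
+--
+-- @book{ hh2013, title = "More on \METAFONT", year = 2013 }
+--
+-- roughly gives
+--
+-- luadata.hh2013 = {
+-- tag = "hh2013",
+-- category = "book",
+-- index = 1,
+-- title = "More on btxcmd{METAFONT}",
+-- year = "2013",
+-- }
+--
+-- while dataset.commands.METAFONT is bumped so that unknown commands can be
+-- reported at the end of the run.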
+
+-- loadbibdata -> dataset.luadata
+-- loadtexdata -> dataset.luadata
+-- loadluadata -> dataset.luadata
+
+-- converttoxml -> dataset.xmldata from dataset.luadata
+
+function publications.loadbibdata(dataset,content,source,kind)
+ dataset = datasets[dataset]
+ statistics.starttiming(publications)
+ publicationsstats.nofbytes = publicationsstats.nofbytes + #content
+ dataset.nofbytes = dataset.nofbytes + #content
+ if source then
+ table.insert(dataset.sources, { filename = source, checksum = md5.HEX(content) })
+ dataset.loaded[source] = kind or true
+ end
+ dataset.newtags = #dataset.luadata > 0 and { } or dataset.newtags
+ publications.markasupdated(dataset)
+ lpegmatch(bibtotable,content or "",1,dataset)
+ statistics.stoptiming(publications)
+end
+
+-- we could use xmlescape again
+
+local cleaner_0 = S('<>&')
+local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
+local cleaner_2 = Cs ( (
+ P("<") / "<" +
+ P(">") / ">" +
+ P("&") / "&" +
+ P(1)
+)^0)
+
+local compact = false -- can be a directive but then we also need to deal with newlines ... not now
+
+function publications.converttoxml(dataset,nice) -- we have fields !
+ dataset = datasets[dataset]
+ local luadata = dataset and dataset.luadata
+ if luadata then
+ statistics.starttiming(publications)
+ statistics.starttiming(xml)
+ --
+ local result, r = { }, 0
+ --
+ r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
+ r = r + 1 ; result[r] = "<bibtex>"
+ --
+ if nice then
+ local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
+ local f_entry_stop = " </entry>"
+ local f_field = formatters["  <field name='%s'>%s</field>"]
+ for tag, entry in sortedhash(luadata) do
+ r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
+ for key, value in sortedhash(entry) do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = f_entry_stop
+ end
+ else
+ local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
+ local f_entry_stop = "</entry>"
+ local f_field = formatters["<field name='%s'>%s</field>"]
+ for tag, entry in next, luadata do
+ r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
+ for key, value in next, entry do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = f_entry_stop
+ end
+ end
+ --
+ r = r + 1 ; result[r] = "</bibtex>"
+ --
+ result = concat(result,nice and "\n" or nil)
+ --
+ dataset.xmldata = xmlconvert(result, {
+ resolve_entities = true,
+ resolve_predefined_entities = true, -- in case we have escaped entities
+ -- unify_predefined_entities = true, -- &amp; -> &
+ utfize_entities = true,
+ } )
+ --
+ statistics.stoptiming(xml)
+ statistics.stoptiming(publications)
+ if lxml then
+ lxml.register(formatters["btx:%s"](dataset.name),dataset.xmldata)
+ end
+ end
+end
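+
+-- For the made-up entry used above, converttoxml(dataset,true) then produces
+-- something along these lines:
+--
+-- <?xml version='1.0' standalone='yes'?>
+-- <bibtex>
+--  <entry tag='hh2013' category='book' index='1'>
+--   <field name='title'>More on btxcmd{METAFONT}</field>
+--   <field name='year'>2013</field>
+--  </entry>
+-- </bibtex>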
+
+local loaders = publications.loaders or { }
+publications.loaders = loaders
+
+function loaders.bib(dataset,filename,kind)
+ dataset = datasets[dataset]
+ local data = io.loaddata(filename) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",filename)
+ elseif trace then
+ report("loading file",filename)
+ end
+ publications.loadbibdata(dataset,data,filename,kind)
+end
+
+function loaders.lua(dataset,filename) -- if filename is a table we load that one
+ dataset = datasets[dataset]
+ -- inspect(filename) -- only while debugging
+ local data = type(filename) == "table" and filename or table.load(filename)
+ if data then
+ local luadata = dataset.luadata
+ for tag, entry in next, data do
+ if type(entry) == "table" then
+ entry.index = getindex(dataset,luadata,tag)
+ luadata[tag] = entry -- no cleaning yet
+ end
+ end
+ end
+end
+
+function loaders.xml(dataset,filename)
+ dataset = datasets[dataset]
+ local luadata = dataset.luadata
+ local root = xml.load(filename)
+ for bibentry in xmlcollected(root,"/bibtex/entry") do
+ local attributes = bibentry.at
+ local tag = attributes.tag
+ local entry = {
+ category = attributes.category
+ }
+ for field in xmlcollected(bibentry,"/field") do
+ -- entry[field.at.name] = xmltext(field)
+ entry[field.at.name] = field.dt[1] -- no cleaning yet
+ end
+ -- local edt = entry.dt
+ -- for i=1,#edt do
+ -- local e = edt[i]
+ -- local a = e.at
+ -- if a and a.name then
+ -- t[a.name] = e.dt[1] -- no cleaning yet
+ -- end
+ -- end
+ entry.index = getindex(dataset,luadata,tag)
+ luadata[tag] = entry
+ end
+end
+
+setmetatableindex(loaders,function(t,filetype)
+ local v = function(dataset,filename)
+ report("no loader for file %a with filetype %a",filename,filetype)
+ end
+ t[filetype] = v
+ return v
+end)
+
+function publications.load(dataset,filename,kind)
+ dataset = datasets[dataset]
+ statistics.starttiming(publications)
+ local files = settings_to_array(filename)
+ for i=1,#files do
+ local filetype, filename = string.splitup(files[i],"::")
+ if not filename then
+ filename = filetype
+ filetype = file.suffix(filename)
+ end
+ local fullname = resolvers.findfile(filename,"bib")
+ if dataset.loaded[fullname] then -- will become better
+ -- skip
+ elseif fullname == "" then
+ report("no file %a",filename)
+ else
+ loaders[filetype](dataset,fullname)
+ end
+ if kind then
+ dataset.loaded[fullname] = kind
+ end
+ end
+ statistics.stoptiming(publications)
+ return dataset
+end
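+
+-- Typical (made-up) calls; the optional "filetype::" prefix overrides the
+-- suffix based detection:
+--
+-- publications.load("standard","tugboat.bib","current")
+-- publications.load("standard","bib::refs.dat,lua::extra.lua")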
+
+local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
+local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
+
+function publications.analyze(dataset)
+ dataset = datasets[dataset]
+ local data = dataset.luadata
+ local categories = { }
+ local fields = { }
+ local commands = { }
+ for k, v in next, data do
+ categories[v.category] = (categories[v.category] or 0) + 1
+ for k, v in next, v do
+ fields[k] = (fields[k] or 0) + 1
+ lpegmatch(checktex,v,1,commands)
+ end
+ end
+ dataset.analysis = {
+ categories = categories,
+ fields = fields,
+ commands = commands,
+ }
+end
+
+-- str = [[
+-- @COMMENT { CRAP }
+-- @STRING{ hans = "h a n s" }
+-- @STRING{ taco = "t a c o" }
+-- @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" }
+-- @SOMETHING{ key2, abc = hans # taco }
+-- @SOMETHING{ key3, abc = "hans" # taco }
+-- @SOMETHING{ key4, abc = hans # "taco" }
+-- @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"}
+-- @SOMETHING{ key6, abc = {oeps {oeps} oeps} }
+-- ]]
+
+-- local dataset = publications.new()
+-- publications.tolua(dataset,str)
+-- publications.toxml(dataset)
+-- publications.toxml(dataset)
+-- print(dataset.xmldata)
+-- inspect(dataset.luadata)
+-- inspect(dataset.xmldata)
+-- inspect(dataset.shortcuts)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- local dataset = publications.new()
+-- publications.load(dataset,"IEEEabrv.bib")
+-- publications.load(dataset,"IEEEfull.bib")
+-- publications.load(dataset,"IEEEexample.bib")
+-- publications.toxml(dataset)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- local dataset = publications.new()
+-- publications.load(dataset,"gut.bib")
+-- publications.load(dataset,"komoedie.bib")
+-- publications.load(dataset,"texbook1.bib")
+-- publications.load(dataset,"texbook2.bib")
+-- publications.load(dataset,"texbook3.bib")
+-- publications.load(dataset,"texgraph.bib")
+-- publications.load(dataset,"texjourn.bib")
+-- publications.load(dataset,"texnique.bib")
+-- publications.load(dataset,"tugboat.bib")
+-- publications.toxml(dataset)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- print(table.serialize(dataset.luadata))
+-- print(table.serialize(dataset.xmldata))
+-- print(table.serialize(dataset.shortcuts))
+-- print(xml.serialize(dataset.xmldata))
diff --git a/Master/texmf-dist/tex/context/base/publ-imp-apa.mkiv b/Master/texmf-dist/tex/context/base/publ-imp-apa.mkiv
new file mode 100644
index 00000000000..3f7b119af67
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-imp-apa.mkiv
@@ -0,0 +1,547 @@
+%D \module
+%D [ file=publ-imp-apa,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D title=APA bibliography style,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+% common
+
+% \loadbtxdefinitionfile[def]
+
+\startsetups btx:apa:common:wherefrom
+ \btxdoifelse {address} {
+ \getvariable{btx:temp}{left}
+ \btxdoifelse {country} {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{address}\btxcomma\btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{address}\btxcomma\btxflush{country}
+ }
+ } {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{address}\btxcomma\btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{address}
+ }
+ }
+ \getvariable{btx:temp}{right}
+ } {
+ \btxdoifelse {country} {
+ \getvariable{btx:temp}{left}
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{country}
+ }
+ \getvariable{btx:temp}{right}
+ } {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \getvariable{btx:temp}{left}
+ \btxflush{\getvariable{btx:temp}{label}}
+ \getvariable{btx:temp}{right}
+ } {
+ \getvariable{btx:temp}{otherwise}
+ }
+ }
+ }
+\stopsetups
+
+% \setvariables[btx:temp][label=,left=,right=,otherwise=]
+
+\startsetups btx:apa:common:publisher
+ \begingroup
+ \setvariables[btx:temp][label=publisher]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:organization
+ \begingroup
+ \setvariables[btx:temp][label=organization]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:school
+ \begingroup
+ \setvariables[btx:temp][label=school]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:institution
+ \begingroup
+ \setvariables[btx:temp][label=institution]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:school:subsentence
+ \begingroup
+ \setvariables[btx:temp][label=school,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:institution:subsentence
+ \begingroup
+ \setvariables[btx:temp][label=institution,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:publisher:sentence
+ \begingroup
+ \setvariables[btx:temp][label=publisher,left=\btxspace,right=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:organization:sentence
+ \begingroup
+ \setvariables[btx:temp][label=organization,left=\btxspace,right=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:title-and-series
+ \btxdoif {title} {
+ \btxflush{title}
+ \btxdoif {series} {
+ \btxlparent\btxflush{series}\btxrparent
+ }
+ \btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:title-it-and-series
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxlparent\btxflush{series}\btxrparent
+ }
+ \btxperiod
+ }
+\stopsetups
+
+\disablemode[btx:apa:edited-book]
+
+\startsetups btx:apa:common:author-and-year
+ \btxdoif {author} {
+ \btxflushauthor{author}
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:author-or-key-and-year
+ \btxdoifelse {author} {
+ \btxflushauthor{author}
+ } {
+ \btxdoif {key} {
+ \btxlbracket\btxsetup{btx:format:key}\btxrbracket
+ }
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:author-editors-crossref-year
+ \btxdoifelse {author} {
+ \btxflushauthor{author}
+ } {
+ \btxdoifelse {editor} {
+ \enablemode[btx:apa:edited-book]
+ \btxflushauthor{editor}
+ \btxcomma\btxsingularplural{editor}{editor}{editors}
+ } {
+ % weird period
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket\btxperiod
+ }
+ }
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:editor-or-key-and-year
+ \btxdoifelse {editor} {
+ \enablemode[btx:apa:edited-book]
+ \btxflushauthor{editor}
+ \btxcomma\btxsingularplural{editor}{editor}{editors}
+ } {
+ \btxdoif {key} {
+ \btxlbracket\btxsetup{btx:format:key}\btxrbracket
+ }
+ }
+ \btxspace
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:note
+ \btxdoif {note} {
+ \btxspace\btxflush{note}\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:comment
+ \btxdoif {comment} {
+ \btxspace\btxflush{comment}\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:p
+ \btxdoif {pages} {
+ \btxspace\btxflush{pages}\btxspace p\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:pp
+ \btxdoif {pages} {
+ \btxspace\btxflush{pages}\btxspace pp\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:pages
+ \btxdoif {pages} {
+ \btxcomma pages~\btxflush{pages}
+ }
+\stopsetups
+
+\startsetups btx:apa:common:edition:sentence
+ \btxdoif {edition} {
+ \btxspace\btxflush{edition}\btxspace edition\btxperiod
+ }
+\stopsetups
+
+% check when the next is used (no period)
+
+% \startsetups btx:apa:common:edition
+% \btxdoif {edition} {
+% \btxspace\btxflush{edition}\btxspace edition
+% }
+% \stopsetups
+
+% we can share more, todo
+
+% specific
+
+\startsetups btx:apa:article
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxdoif {title} {
+ \btxflush{title}\btxperiod
+ }
+ \btxdoifelse {journal} {
+ \bgroup\it\btxflush{journal}\/\egroup
+ } {
+ \btxdoif {crossref} {
+ In\btxspace\btxflush{crossref}
+ }
+ }
+ \btxdoifelse {volume} {
+ \btxcomma\bgroup\it\btxflush{volume}\/\egroup
+ \btxdoif {issue} {
+ \btxlparent\btxflush{issue}\btxrparent
+ }
+ \btxdoif {pages} {
+ \btxcomma\btxflush{pages}
+ }
+ \btxperiod
+ } {
+ \btxsetup{btx:apa:common:pages:pp}
+ }
+ \btxsetup{btx:apa:common:note}
+ \btxsetup{btx:apa:common:comment}
+\stopsetups
+
+\startsetups btx:apa:book
+ \btxsetup{btx:apa:common:author-editors-crossref-year}
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \doifmodeelse {btx:apa:edited-book} {
+ \btxdoifelse {volume} {
+ \btxspace Number\nonbreakablespace\btxflush{volume}
+ \btxdoifelse {series} {
+ \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
+ } {
+ \btxdoifelse {crossref} {
+ \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ } {
+ \btxperiod
+ }
+ }
+ } {
+ \btxdoif {series} {
+ \btxspace\btxflush{series}
+ }
+ \btxperiod
+ }
+ } {
+ \btxdoifelse {crossref} {
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxdoif {volume} {
+ Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
+ }
+ } {
+ \btxdoif {volume} {
+ \btxcomma volume\nonbreakablespace\btxflush{volume}
+ \btxdoif {series} {
+ \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ }
+ \btxperiod
+ }
+ }
+ }
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}% twice?
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:inbook
+ \btxsetup{btx:apa:common:author-editors-crossref-year}
+ \btxdoifelse {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ } {
+ \doifmodeelse {btx:apa:edited-book} {
+ \btxdoifelse {volume} {
+ \btxspace number\nonbreakablespace\btxflush{volume}
+ \btxdoifelse {series} {
+ \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
+ } {
+ \btxdoifelse {crossref} {
+ \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ } {
+ \btxperiod
+ }
+ }
+ } {
+ \btxdoif {series} {
+ \btxspace\btxflush{series}\btxperiod
+ }
+ }
+ } {
+ \btxdoifelse {crossref} {
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxdoif {volume} {
+ Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
+ }
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ } {
+ \btxdoif {volume} {
+ \btxcomma volume\nonbreakablespace\btxflush{volume}
+ \btxdoif {series} {
+ \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ }
+ }
+ }
+ \btxspace
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:booklet
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:manual
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:organization:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:incollection
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxdoif {arttitle} {
+ \btxflush{arttitle}\btxperiod
+ }
+ In\btxspace
+ \btxdoifelse {title} {
+ \btxflushauthor{editor}\btxcomma
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxdoif {volume} {
+ \btxcomma number\btxspace\btxflush{volume}\btxspace in
+ }
+ \btxspace\btxflush{series}
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxdoif {edition} {
+ \btxspace\btxflush{edition}\btxspace edition
+ }
+ \btxsetup{btx:apa:common:publisher:sentence}
+ } {
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxspace
+ \btxsetup{btx:apa:common:pages:pages}
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:inproceedings
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxdoif {arttitle} {
+ \btxflush{arttitle}\btxperiod
+ }
+ In\btxspace
+ \btxdoifelse {title} {
+ \btxdoif {editor} {
+ \btxflush{btx:apa:format:editors}
+ \btxcomma\btxsingularplural{editor}{editor}{editors}\btxcomma
+ }
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxdoif {volume} {
+ \btxcomma number~\btxflush{volume} in
+ }
+ \btxspace
+ \btxflush{series}
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxsetup{btx:apa:common:organization:sentence}
+ } {
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:proceedings
+ \btxsetup{btx:apa:common:editor-or-key-and-year}
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {volume} {
+ \btxcomma number\btxspace\btxflush{volume}\btxspace in\btxspace
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxsetup{btx:apa:common:organization:sentence}
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:common:thesis
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxdoifelse {type} {
+ \btxflush{type}
+ } {
+ \getvariable{btx:temp}{label}
+ }
+ \btxsetup{btx:apa:common:school:subsentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:mastersthesis
+ \setvariables[btx:temp][label=Master's thesis]
+ \btxsetup{btx:apa:common:thesis}
+\stopsetups
+
+\startsetups btx:apa:phdthesis
+ \setvariables[btx:temp][label=PhD thesis]
+ \btxsetup{btx:apa:common:thesis}
+\stopsetups
+
+\startsetups btx:apa:techreport
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxdoifelse {type} {
+ \btxflush{type}
+ \btxdoif {volume} {
+ \btxspace\btxflush{volume}
+ }
+ } {
+ \btxspace Technical Report
+ }
+ \btxsetup{btx:apa:common:institution:subsentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:misc
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:unpublished
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxdoif {type} {
+ \btxlparent\btxflush{type}\btxrparent
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\endinput
diff --git a/Master/texmf-dist/tex/context/base/publ-imp-cite.mkiv b/Master/texmf-dist/tex/context/base/publ-imp-cite.mkiv
new file mode 100644
index 00000000000..d64c2132cee
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-imp-cite.mkiv
@@ -0,0 +1,74 @@
+%D \module
+%D [ file=publ-imp-cite,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=XML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startsetups btx:cite:author
+ \btxcitevariant{author}
+\stopsetups
+
+\startsetups btx:cite:authoryear
+ \btxcitevariant{authoryear}
+\stopsetups
+
+\startsetups btx:cite:authoryears
+ \btxcitevariant{authoryears}
+\stopsetups
+
+% \startsetups btx:cite:authornum
+% \btxcitevariant{author}
+% \btxcitevariantparameter\c!inbetween
+% \btxcitevariant{num}
+% \stopsetups
+
+\startsetups btx:cite:authornum
+ \btxcitevariant{authornum}
+\stopsetups
+
+\startsetups btx:cite:year
+ \btxcitevariant{year}
+\stopsetups
+
+\startsetups btx:cite:short
+ \btxcitevariant{short}
+\stopsetups
+
+\startsetups btx:cite:serial
+ \btxcitevariant{serial}
+\stopsetups
+
+\startsetups btx:cite:key
+ \currentbtxtag % \btxcitevariant{tag}
+\stopsetups
+
+\startsetups btx:cite:doi
+ todo: \btxcitevariant{doi}
+\stopsetups
+
+\startsetups btx:cite:url
+ todo: \btxcitevariant{url}
+\stopsetups
+
+\startsetups btx:cite:type
+ \btxcitevariant{category}
+\stopsetups
+
+\startsetups btx:cite:page
+ \btxcitevariant{page}
+\stopsetups
+
+\startsetups btx:cite:none
+ % dummy
+\stopsetups
+
+\startsetups btx:cite:num
+ \btxcitevariant{num}
+\stopsetups
diff --git a/Master/texmf-dist/tex/context/base/publ-imp-commands.mkiv b/Master/texmf-dist/tex/context/base/publ-imp-commands.mkiv
new file mode 100644
index 00000000000..14e2dbae12d
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-imp-commands.mkiv
@@ -0,0 +1,15 @@
+\unprotect
+
+% for tugboat
+
+\definebtxcommand\hbox {\hbox}
+\definebtxcommand\vbox {\vbox}
+\definebtxcommand\llap {\llap}
+\definebtxcommand\rlap {\rlap}
+\definebtxcommand\url #1{\hyphenatedurl{#1}}
+\definebtxcommand\acro #1{\dontleavehmode{\smallcaps#1}}
+
+\let\<<
+\let\<>
+
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/publ-imp-definitions.mkiv b/Master/texmf-dist/tex/context/base/publ-imp-definitions.mkiv
new file mode 100644
index 00000000000..2cf2e3e8efe
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-imp-definitions.mkiv
@@ -0,0 +1,68 @@
+%D \module
+%D [ file=publ-imp-def,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Definitions,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Here we collect some helper setups. We assume that checking of a field
+%D happens in the calling setup, if only because that is the place where
+%D fences are also dealt with.
+
+\unprotect
+
+\startxmlsetups btx:format:crossref
+ \cite[\btxfield{crossref}]
+\stopxmlsetups
+
+\startxmlsetups btx:format:key
+ \btxfield{short}
+\stopxmlsetups
+
+\startxmlsetups btx:format:doi
+ \edef\currentbtxfielddoi{\btxfield{doi}}
+ \ifx\currentbtxfielddoi\empty
+ {\tttf no-doi}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfielddoi}}[url(http://dx.doi.org/\currentbtxfielddoi)]
+ \else
+ \hyphenatedurl{\currentbtxfielddoi}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:url
+ \edef\currentbtxfieldurl{\btxfield{url}}
+ \ifx\currentbtxfieldurl\empty
+ {\tttf no-url}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfieldurl}}[url(\currentbtxfieldurl)]
+ \else
+ \hyphenatedurl{\currentbtxfieldurl}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:month
+ \edef\currentbtxfieldmonth{\btxfield{month}}
+ \ifx\currentbtxfieldmonth\empty
+ {\tttf no-month}
+ \else
+ \edef\p_monthconversion{\btxlistvariantparameter\c!monthconversion}
+ \ifx\p_monthconversion\empty % month month:mnem
+ \currentbtxfieldmonth
+ \else
+ \doifnumberelse \currentbtxfieldmonth {
+ \convertnumber\p_monthconversion\currentbtxfieldmonth
+ } {
+ \currentbtxfieldmonth
+ }
+ \fi
+ \fi
+\stopxmlsetups
+
+\protect
diff --git a/Master/texmf-dist/tex/context/base/publ-ini.lua b/Master/texmf-dist/tex/context/base/publ-ini.lua
new file mode 100644
index 00000000000..e25c57e297d
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-ini.lua
@@ -0,0 +1,1431 @@
+if not modules then modules = { } end modules ['publ-ini'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- for the moment here
+
+local lpegmatch = lpeg.match
+local P, C, Ct, Cs = lpeg.P, lpeg.C, lpeg.Ct, lpeg.Cs
+
+local lpegmatch = lpeg.match
+local pattern = Cs((1 - P(1) * P(-1))^0 * (P(".")/"" + P(1)))
+
+local manipulators = {
+ stripperiod = function(str) return lpegmatch(pattern,str) end,
+ uppercase = characters.upper,
+ lowercase = characters.lower,
+}
+
+local manipulation = C((1-P("->"))^1) * P("->") * C(P(1)^0)
+
+local pattern = manipulation / function(operation,str)
+ local manipulator = manipulators[operation]
+ return manipulator and manipulator(str) or str
+end
+
+local function manipulated(str)
+ return lpegmatch(pattern,str) or str
+end
+
+utilities.parsers.manipulation = manipulation
+utilities.parsers.manipulators = manipulators
+utilities.parsers.manipulated = manipulated
+
+function commands.manipulated(str)
+ context(manipulated(str))
+end
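+
+-- A quick (made-up) check of the prefix syntax:
+--
+-- print(manipulated("stripperiod->Just a sentence.")) -- Just a sentence
+-- print(manipulated("uppercase->acm")) -- ACM
+-- print(manipulated("nosuchthing->acm")) -- acm (unknown manipulators are ignored)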
+
+-- use: for rest in gmatch(reference,"[^, ]+") do
+
+local next, rawget, type = next, rawget, type
+local match, gmatch, format, gsub = string.match, string.gmatch, string.format, string.gsub
+local concat, sort = table.concat, table.sort
+local utfsub = utf.sub
+local formatters = string.formatters
+local allocate = utilities.storage.allocate
+local settings_to_array, settings_to_set = utilities.parsers.settings_to_array, utilities.parsers.settings_to_set
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local lpegmatch = lpeg.match
+local P, C, Ct = lpeg.P, lpeg.C, lpeg.Ct
+
+local report = logs.reporter("publications")
+local trace = false trackers.register("publications", function(v) trace = v end)
+
+local datasets = publications.datasets
+
+local variables = interfaces.variables
+
+local v_local = variables["local"]
+local v_global = variables["global"]
+
+local v_force = variables.force
+local v_standard = variables.standard
+local v_start = variables.start
+local v_none = variables.none
+local v_left = variables.left
+local v_right = variables.right
+local v_middle = variables.middle
+local v_inbetween = variables.inbetween
+
+local v_short = variables.short
+local v_cite = variables.cite
+local v_default = variables.default
+local v_reference = variables.reference
+local v_dataset = variables.dataset
+local v_author = variables.author or "author"
+local v_editor = variables.editor or "editor"
+
+local numbertochar = converters.characters
+
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+local csname_id = token.csname_id
+
+local basicsorter = sorters.basicsorter -- (a,b)
+local sortcomparer = sorters.comparers.basic -- (a,b)
+local sortstripper = sorters.strip
+local sortsplitter = sorters.splitters.utf
+
+local context = context
+
+local ctx_btxlistparameter = context.btxlistparameter
+local ctx_btxcitevariantparameter = context.btxcitevariantparameter
+local ctx_btxlistvariantparameter = context.btxlistvariantparameter
+local ctx_btxdomarkcitation = context.btxdomarkcitation
+local ctx_setvalue = context.setvalue
+local ctx_firstoftwoarguments = context.firstoftwoarguments
+local ctx_secondoftwoarguments = context.secondoftwoarguments
+local ctx_firstofoneargument = context.firstofoneargument
+local ctx_gobbleoneargument = context.gobbleoneargument
+local ctx_btxdirectlink = context.btxdirectlink
+local ctx_btxhandlelistentry = context.btxhandlelistentry
+local ctx_btxchecklistentry = context.btxchecklistentry
+local ctx_dodirectfullreference = context.dodirectfullreference
+local ctx_directsetup = context.directsetup
+
+statistics.register("publications load time", function()
+ local publicationsstats = publications.statistics
+ local nofbytes = publicationsstats.nofbytes
+ if nofbytes > 0 then
+ return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts",
+ statistics.elapsedtime(publications),nofbytes,publicationsstats.nofdefinitions,publicationsstats.nofshortcuts)
+ else
+ return nil
+ end
+end)
+
+luatex.registerstopactions(function()
+ local done = false
+ local undefined = csname_id("undefined*crap")
+ for name, dataset in sortedhash(datasets) do
+ for command, n in sortedhash(dataset.commands) do
+ if not done then
+ logspushtarget("logfile")
+ logsnewline()
+ report("start used btx commands")
+ logsnewline()
+ done = true
+ end
+ local c = csname_id(command)
+ if c and c ~= undefined then
+ report("%-20s %-20s % 5i %s",name,command,n,"known")
+ else
+ local u = csname_id(utf.upper(command))
+ if u and u ~= undefined then
+ report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
+ else
+ report("%-20s %-20s % 5i %s",name,command,n,"unknown")
+ end
+ end
+ end
+ end
+ if done then
+ logsnewline()
+ report("stop used btx commands")
+ logsnewline()
+ logspoptarget()
+ end
+end)
+
+-- multipass: we need to sort because hashing is random per run and not per
+-- version (not the nicest feature change in lua)
+
+local collected = allocate()
+local tobesaved = allocate()
+
+-- we use a dedicated (and efficient, as it knows what it deals with) serializer,
+-- also because we need to ignore the 'details' field
+
+local function serialize(t)
+ local f_key_table = formatters[" [%q] = {"]
+ local f_key_string = formatters[" %s = %q,"]
+ local r = { "return {" }
+ local m = 1
+ for tag, entry in sortedhash(t) do
+ m = m + 1
+ r[m] = f_key_table(tag)
+ local s = sortedkeys(entry)
+ for i=1,#s do
+ local k = s[i]
+ -- if k ~= "details" then
+ m = m + 1
+ r[m] = f_key_string(k,entry[k])
+ -- end
+ end
+ m = m + 1
+ r[m] = " },"
+ end
+ r[m] = "}"
+ return concat(r,"\n")
+end
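+
+-- The saved user data then looks roughly like this (made-up content):
+--
+-- return {
+-- ["hh2013"] = {
+-- note = "cited in chapter one",
+-- },
+-- }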
+
+local function finalizer()
+ local prefix = tex.jobname -- or environment.jobname
+ local setnames = sortedkeys(datasets)
+ for i=1,#setnames do
+ local name = setnames[i]
+ local dataset = datasets[name]
+ local userdata = dataset.userdata
+ local checksum = nil
+ local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
+ if userdata and next(userdata) then
+ if job.passes.first then
+ local newdata = serialize(userdata)
+ checksum = md5.HEX(newdata)
+ io.savedata(username,newdata)
+ end
+ else
+ os.remove(username)
+ username = nil
+ end
+ local loaded = dataset.loaded
+ local sources = dataset.sources
+ local used = { }
+ for i=1,#sources do
+ local source = sources[i]
+ if loaded[source.filename] ~= "previous" then -- or loaded[source.filename] == "current"
+ used[#used+1] = source
+ end
+ end
+ tobesaved[name] = {
+ usersource = {
+ filename = username,
+ checksum = checksum,
+ },
+ datasources = used,
+ }
+ end
+end
+
+local function initializer()
+ statistics.starttiming(publications)
+collected = publications.collected or collected -- for the moment as we load runtime
+ for name, state in next, collected do
+ local dataset = datasets[name]
+ local datasources = state.datasources
+ local usersource = state.usersource
+ if datasources then
+ for i=1,#datasources do
+ local filename = datasources[i].filename
+ publications.load(dataset,filename,"previous")
+ end
+ end
+ if usersource then
+ dataset.userdata = table.load(usersource.filename) or { }
+ end
+ end
+ statistics.stoptiming(publications)
+ function initializer() end -- will go, for now, runtime loaded
+end
+
+job.register('publications.collected',tobesaved,initializer,finalizer)
+
+if not publications.authors then
+ initializer() -- for now, runtime loaded
+end
+
+-- basic access
+
+local function getfield(dataset,tag,name)
+ local d = datasets[dataset].luadata[tag]
+ return d and d[name]
+end
+
+local function getdetail(dataset,tag,name)
+ local d = datasets[dataset].details[tag]
+ return d and d[name]
+end
+
+function commands.btxsingularorplural(dataset,tag,name) -- todo: make field dependent
+ local d = datasets[dataset].details[tag]
+ if d then
+ d = d[name]
+ end
+ if d then
+ d = #d <= 1
+ end
+ commands.doifelse(d)
+end
+
+-- basic loading
+
+function commands.usebtxdataset(name,filename)
+ publications.load(datasets[name],filename,"current")
+end
+
+function commands.convertbtxdatasettoxml(name,nice)
+ publications.converttoxml(datasets[name],nice)
+end
+
+-- enhancing
+
+local splitauthorstring = publications.authors.splitstring
+
+local pagessplitter = lpeg.splitat(P("-")^1)
+
+-- maybe not redo when already done
+
+function publications.enhance(dataset) -- for the moment split runs (maybe publications.enhancers)
+ statistics.starttiming(publications)
+ if type(dataset) == "string" then
+ dataset = datasets[dataset]
+ end
+ local luadata = dataset.luadata
+ local details = dataset.details
+ -- author, editor
+ for tag, entry in next, luadata do
+ local author = entry.author
+ local editor = entry.editor
+ details[tag] = {
+ author = author and splitauthorstring(author),
+ editor = editor and splitauthorstring(editor),
+ }
+ end
+ -- short
+ local shorts = { }
+ for tag, entry in next, luadata do
+ local author = details[tag].author
+ if author then
+ -- number depends on sort order
+ local t = { }
+ if #author == 0 then
+ -- what
+ else
+ local n = #author == 1 and 3 or 1
+ for i=1,#author do
+ local surnames = author[i].surnames
+ if not surnames or #surnames == 0 then
+ -- error
+ else
+ t[#t+1] = utfsub(surnames[1],1,n)
+ end
+ end
+ end
+ local year = tonumber(entry.year) or 0
+ local short = formatters["%t%02i"](t,year % 100)
+ local s = shorts[short]
+ if not s then
+ shorts[short] = tag
+ elseif type(s) == "string" then
+ shorts[short] = { s, tag }
+ else
+ s[#s+1] = tag
+ end
+ else
+ --
+ end
+ end
+ for short, tags in next, shorts do
+ if type(tags) == "table" then
+ sort(tags)
+ for i=1,#tags do
+-- details[tags[i]].short = short .. numbertochar(i)
+local detail = details[tags[i]]
+detail.short = short
+detail.suffix = numbertochar(i)
+ end
+ else
+ details[tags].short = short
+ end
+ end
+ -- pages
+ for tag, entry in next, luadata do
+ local pages = entry.pages
+ if pages then
+ local first, last = lpegmatch(pagessplitter,pages)
+ details[tag].pages = first and last and { first, last } or pages
+ end
+ end
+ -- keywords
+ for tag, entry in next, luadata do
+ local keyword = entry.keyword
+ if keyword then
+ details[tag].keyword = settings_to_set(keyword)
+ end
+ end
+ statistics.stoptiming(publications)
+end
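+
+-- The derived short keys are the usual author-year tags, for instance
+-- (made-up entries):
+--
+-- author = "Hagen, Hans", year = 2013 -> short = "Hag13"
+-- author = "Hagen, Hans and Hoekwater, Taco", year = 1996 -> short = "HH96"
+--
+-- and entries that end up with the same short get an additional suffix.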
+
+function commands.addbtxentry(name,settings,content)
+ local dataset = datasets[name]
+ if dataset then
+ publications.addtexentry(dataset,settings,content)
+ end
+end
+
+function commands.setbtxdataset(name)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ context(name)
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.setbtxentry(name,tag)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ if dataset.luadata[tag] then
+ context(tag)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+-- rendering of fields (maybe multiple manipulators)
+
+local manipulation = utilities.parsers.manipulation
+local manipulators = utilities.parsers.manipulators
+
+-- local function checked(field)
+-- local m, f = lpegmatch(manipulation,field)
+-- if m then
+-- return manipulators[m], f or field
+-- else
+-- return nil, field
+-- end
+-- end
+
+local manipulation = Ct((C((1-P("->"))^1) * P("->"))^1) * C(P(1)^0)
+
+local function checked(field)
+ local m, f = lpegmatch(manipulation,field)
+ if m then
+ return m, f or field
+ else
+ return nil, field
+ end
+end
+
+local function manipulated(actions,str)
+ for i=1,#actions do
+ local action = manipulators[actions[i]]
+ if action then
+ str = action(str) or str
+ end
+ end
+ return str
+end
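+
+-- A chained (made-up) specification, applied left to right:
+--
+-- commands.btxflush("standard","hh2013","stripperiod->uppercase->journal")
+-- -- flushes "TUGBOAT" when the journal field is "TUGboat."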
+
+function commands.btxflush(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local manipulator, field = checked(field)
+ local value = fields[field]
+ if type(value) == "string" then
+ -- context(manipulator and manipulator(value) or value)
+ context(manipulator and manipulated(manipulator,value) or value)
+ return
+ end
+ local details = dataset.details[tag]
+ if details then
+ local value = details[field]
+ if type(value) == "string" then
+ -- context(manipulator and manipulator(value) or value)
+ context(manipulator and manipulated(manipulator,value) or value)
+ return
+ end
+ end
+ report("unknown field %a of tag %a in dataset %a",field,tag,name)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.btxdetail(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local details = dataset.details[tag]
+ if details then
+ local manipulator, field = checked(field)
+ local value = details[field]
+ if type(value) == "string" then
+ -- context(manipulator and manipulator(value) or value)
+ context(manipulator and manipulated(manipulator,value) or value)
+ else
+ report("unknown detail %a of tag %a in dataset %a",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.btxfield(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local manipulator, field = checked(field)
+ local value = fields[field]
+ if type(value) == "string" then
+ -- context(manipulator and manipulator(value) or value)
+ context(manipulator and manipulated(manipulator,value) or value)
+ else
+ report("unknown field %a of tag %a in dataset %a",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+-- testing: to be sped up with a test case
+
+function commands.btxdoifelse(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ ctx_firstoftwoarguments()
+ return
+ end
+ end
+ ctx_secondoftwoarguments()
+end
+
+function commands.btxdoif(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ ctx_firstofoneargument()
+ return
+ end
+ end
+ ctx_gobbleoneargument()
+end
+
+function commands.btxdoifnot(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ ctx_gobbleoneargument()
+ return
+ end
+ end
+ ctx_firstofoneargument()
+end
+
+-- -- alternative approach: keep data at the tex end
+
+function publications.listconcat(t)
+ local n = #t
+ if n > 0 then
+ context(t[1])
+ if n > 1 then
+ if n > 2 then
+ for i=2,n-1 do
+ ctx_btxlistparameter("sep")
+ context(t[i])
+ end
+ ctx_btxlistparameter("finalsep")
+ else
+ ctx_btxlistparameter("lastsep")
+ end
+ context(t[n])
+ end
+ end
+end
+
+function publications.citeconcat(t)
+ local n = #t
+ if n > 0 then
+ context(t[1])
+ if n > 1 then
+ if n > 2 then
+ for i=2,n-1 do
+ ctx_btxcitevariantparameter("sep")
+ context(t[i])
+ end
+ ctx_btxcitevariantparameter("finalsep")
+ else
+ ctx_btxcitevariantparameter("lastsep")
+ end
+ context(t[n])
+ end
+ end
+end
+
+function publications.singularorplural(singular,plural)
+ if lastconcatsize and lastconcatsize > 1 then
+ context(plural)
+ else
+ context(singular)
+ end
+end
+
+-- function commands.makebibauthorlist(settings) -- ?
+-- if not settings then
+-- return
+-- end
+-- local dataset = datasets[settings.dataset]
+-- if not dataset or dataset == "" then
+-- return
+-- end
+-- local tag = settings.tag
+-- if not tag or tag == "" then
+-- return
+-- end
+-- local asked = settings_to_array(tag)
+-- if #asked == 0 then
+-- return
+-- end
+-- local compress = settings.compress
+-- local interaction = settings.interactionn == v_start
+-- local limit = tonumber(settings.limit)
+-- local found = { }
+-- local hash = { }
+-- local total = 0
+-- local luadata = dataset.luadata
+-- for i=1,#asked do
+-- local tag = asked[i]
+-- local data = luadata[tag]
+-- if data then
+-- local author = data.a or "Xxxxxxxxxx"
+-- local year = data.y or "0000"
+-- if not compress or not hash[author] then
+-- local t = {
+-- author = author,
+-- name = name, -- first
+-- year = { [year] = name },
+-- }
+-- total = total + 1
+-- found[total] = t
+-- hash[author] = t
+-- else
+-- hash[author].year[year] = name
+-- end
+-- end
+-- end
+-- for i=1,total do
+-- local data = found[i]
+-- local author = data.author
+-- local year = table.keys(data.year)
+-- table.sort(year)
+-- if interaction then
+-- for i=1,#year do
+-- year[i] = formatters["\\bibmaybeinteractive{%s}{%s}"](data.year[year[i]],year[i])
+-- end
+-- end
+-- ctx_setvalue("currentbibyear",concat(year,","))
+-- if author == "" then
+-- ctx_setvalue("currentbibauthor","")
+-- else -- needs checking
+-- local authors = settings_to_array(author) -- {{}{}},{{}{}}
+-- local nofauthors = #authors
+-- if nofauthors == 1 then
+-- if interaction then
+-- author = formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,author)
+-- end
+-- ctx_setvalue("currentbibauthor",author)
+-- else
+-- limit = limit or nofauthors
+-- if interaction then
+-- for i=1,#authors do
+-- authors[i] = formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,authors[i])
+-- end
+-- end
+-- if limit == 1 then
+-- ctx_setvalue("currentbibauthor",authors[1] .. "\\bibalternative{otherstext}")
+-- elseif limit == 2 and nofauthors == 2 then
+-- ctx_setvalue("currentbibauthor",concat(authors,"\\bibalternative{andtext}"))
+-- else
+-- for i=1,limit-1 do
+-- authors[i] = authors[i] .. "\\bibalternative{namesep}"
+-- end
+-- if limit < nofauthors then
+-- authors[limit+1] = "\\bibalternative{otherstext}"
+-- ctx_setvalue("currentbibauthor",concat(authors,"",1,limit+1))
+-- else
+-- authors[limit-1] = authors[limit-1] .. "\\bibalternative{andtext}"
+-- ctx_setvalue("currentbibauthor",concat(authors))
+-- end
+-- end
+-- end
+-- end
+-- -- the following use: currentbibauthor and currentbibyear
+-- if i == 1 then
+-- context.ixfirstcommand()
+-- elseif i == total then
+-- context.ixlastcommand()
+-- else
+-- context.ixsecondcommand()
+-- end
+-- end
+-- end
+
+local patterns = { "publ-imp-%s.mkiv", "publ-imp-%s.tex" }
+
+local function failure(name)
+ report("unknown library %a",name)
+end
+
+local function action(name,foundname)
+ context.input(foundname)
+end
+
+function commands.loadbtxdefinitionfile(name) -- a more specific name
+ commands.uselibrary {
+ name = gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = false,
+ }
+end
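+-- so, for example, "\loadbtxdefinitionfile[apa]" (or "[publ-apa]") will locate and
+-- input publ-imp-apa.mkiv or publ-imp-apa.tex when such a file is present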
+
+-- lists:
+
+publications.lists = publications.lists or { }
+local lists = publications.lists
+
+local context = context
+local structures = structures
+
+local references = structures.references
+local sections = structures.sections
+
+-- per rendering
+
+local renderings = { } --- per dataset
+
+table.setmetatableindex(renderings,function(t,k)
+ local v = {
+ list = { },
+ done = { },
+ alldone = { },
+ used = { },
+ registered = { },
+ ordered = { },
+ shorts = { },
+ method = v_none,
+ currentindex = 0,
+ }
+ t[k] = v
+ return v
+end)
+
+-- why shorts vs tags: only for sorting
+
+function lists.register(dataset,tag,short) -- needs checking now that we split
+ local r = renderings[dataset]
+ if not short or short == "" then
+ short = tag
+ end
+ if trace then
+ report("registering publication entry %a with shortcut %a",tag,short)
+ end
+ local top = #r.registered + 1
+ -- do we really need these
+ r.registered[top] = tag
+ r.ordered [tag] = top
+ r.shorts [tag] = short
+end
+
+function lists.nofregistered(dataset)
+ return #renderings[dataset].registered
+end
+
+function lists.setmethod(dataset,method)
+ local r = renderings[dataset]
+ r.method = method or v_none
+ r.list = { }
+ r.done = { }
+end
+
+local function validkeyword(dataset,tag,keyword)
+ local ds = datasets[dataset]
+ if not ds then
+ report("unknown dataset %a",dataset)
+ return
+ end
+ local dt = ds.details[tag]
+ if not dt then
+ report("no details for tag %a",tag)
+ return
+ end
+ local kw = dt.keyword
+ if kw then
+-- inspect(keyword)
+-- inspect(kw)
+ for k in next, keyword do
+ if kw[k] then
+ return true
+ end
+ end
+ end
+end
+
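+-- What ends up in a rendering list depends on the method; summarizing the code
+-- below: "local" collects each cited tag once per section, "global" collects it
+-- only once per document, "force" keeps duplicates, and "dataset" simply takes
+-- every entry in the dataset, in all cases optionally filtered by keyword.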
+function lists.collectentries(specification)
+ local dataset = specification.btxdataset
+ if not dataset then
+ return
+ end
+ local rendering = renderings[dataset]
+-- specification.names = "btx"
+ local method = rendering.method
+ if method == v_none then
+ return
+ end
+-- method=v_local --------------------
+ local result = structures.lists.filter(specification)
+ --
+ local keyword = specification.keyword
+ if keyword and keyword ~= "" then
+ keyword = settings_to_set(keyword)
+ else
+ keyword = nil
+ end
+ lists.result = result
+ local section = sections.currentid()
+ local list = rendering.list
+ local done = rendering.done
+ local alldone = rendering.alldone
+ if method == v_local then
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag and done[tag] ~= section then
+ if not keyword or validkeyword(dataset,tag,keyword) then
+ done[tag] = section
+ alldone[tag] = true
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ end
+ elseif method == v_global then
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag and not alldone[tag] and done[tag] ~= section then
+ if not keyword or validkeyword(dataset,tag,keyword) then
+ done[tag] = section
+ alldone[tag] = true
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ end
+ elseif method == v_force then
+ -- only for checking, can have duplicates, todo: collapse page numbers, although
+        -- we then also need deferred writes
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag then
+ if not keyword or validkeyword(dataset,tag,keyword) then
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ end
+ elseif method == v_dataset then
+ local luadata = datasets[dataset].luadata
+ for tag, data in table.sortedhash(luadata) do
+ if not keyword or validkeyword(dataset,tag,keyword) then
+ list[#list+1] = { tag }
+ end
+ end
+ end
+end
+
+lists.sorters = {
+ [v_short] = function(dataset,rendering,list)
+ local shorts = rendering.shorts
+ local function compare(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = shorts[aa], shorts[bb]
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ sort(list,compare)
+ end,
+ [v_reference] = function(dataset,rendering,list)
+ local function compare(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ sort(list,compare)
+ end,
+    [v_dataset] = function(dataset,rendering,list)
+        local luadata = datasets[dataset].luadata
+        local function compare(a,b)
+            local aa, bb = a and a[1], b and b[1]
+            if aa and bb then
+                -- aa and bb are tags, so we compare the entry indices in the dataset
+                aa, bb = luadata[aa] and luadata[aa].index or 0, luadata[bb] and luadata[bb].index or 0
+                return aa < bb
+            end
+            return false
+        end
+        sort(list,compare)
+    end,
+ -- [v_default] = function(dataset,rendering,list) -- not really needed
+ -- local ordered = rendering.ordered
+ -- local function compare(a,b)
+ -- local aa, bb = a and a[1], b and b[1]
+ -- if aa and bb then
+ -- aa, bb = ordered[aa], ordered[bb]
+ -- return aa and bb and aa < bb
+ -- end
+ -- return false
+ -- end
+ -- sort(list,compare)
+ -- end,
+ [v_author] = function(dataset,rendering,list)
+ local valid = publications.authors.preparedsort(dataset,list,v_author,v_editor)
+ if #valid == 0 or #valid ~= #list then
+ -- nothing to sort
+ else
+ -- if needed we can wrap compare and use the list directly but this is cleaner
+ sorters.sort(valid,sortcomparer)
+ for i=1,#valid do
+ local v = valid[i]
+ valid[i] = list[v.index]
+ end
+ return valid
+ end
+ end,
+}
+
+function lists.flushentries(dataset,sortvariant)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local sort = lists.sorters[sortvariant] or lists.sorters[v_default]
+ if type(sort) == "function" then
+ list = sort(dataset,rendering,list) or list
+ end
+ for i=1,#list do
+ ctx_setvalue("currentbtxindex",i)
+ ctx_btxhandlelistentry(list[i][1]) -- we can pass i here too ... more efficient to avoid the setvalue
+ end
+end
+
+function lists.fetchentries(dataset)
+ local list = renderings[dataset].list
+ for i=1,#list do
+ ctx_setvalue("currentbtxindex",i)
+ ctx_btxchecklistentry(list[i][1])
+ end
+end
+
+function lists.filterall(dataset)
+ local r = renderings[dataset]
+ local list = r.list
+ local registered = r.registered
+ for i=1,#registered do
+ list[i] = { registered[i], i }
+ end
+end
+
+function lists.registerplaced(dataset,tag)
+ renderings[dataset].used[tag] = true
+end
+
+function lists.doifalreadyplaced(dataset,tag)
+ commands.doifelse(renderings[dataset].used[tag])
+end
+
+-- we ask for :tag but when we can't find it we go back
+-- to look for previous definitions, and when not found again
+-- we look forward
+
+local function compare(a,b)
+ local aa, bb = a and a[3], b and b[3]
+ return aa and bb and aa < bb
+end
+
+-- maybe hash subsets
+-- how efficient is this? old leftovers?
+
+-- rendering ?
+
+local f_reference = formatters["r:%s:%s:%s"] -- dataset, instance (block), tag
+local f_destination = formatters["d:%s:%s:%s"] -- dataset, instance (block), tag
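+-- for instance, with the default dataset "standard", block 1 and a hypothetical tag
+-- "knuth1984", these yield "r:standard:1:knuth1984" and "d:standard:1:knuth1984"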
+
+function lists.resolve(dataset,reference) -- maybe already feed it split
+ -- needs checking (the prefix in relation to components)
+ local subsets = nil
+ local block = tex.count.btxblock
+ local collected = references.collected
+ local prefix = nil -- todo: dataset ?
+ if prefix and prefix ~= "" then
+ subsets = { collected[prefix] or collected[""] }
+ else
+ local components = references.productdata.components
+ local subset = collected[""]
+ if subset then
+ subsets = { subset }
+ else
+ subsets = { }
+ end
+ for i=1,#components do
+ local subset = collected[components[i]]
+ if subset then
+ subsets[#subsets+1] = subset
+ end
+ end
+ end
+-- inspect(subsets)
+ if #subsets > 0 then
+ local result, nofresult, done = { }, 0, { }
+ for i=1,#subsets do
+ local subset = subsets[i]
+ for rest in gmatch(reference,"[^, ]+") do
+ local blk, tag, found = block, nil, nil
+ if block then
+ tag = f_destination(dataset,blk,rest)
+ found = subset[tag]
+ if not found then
+ for i=block-1,1,-1 do
+                            tag = f_destination(dataset,i,rest)
+-- tag = i .. ":" .. rest
+ found = subset[tag]
+ if found then
+ blk = i
+ break
+ end
+ end
+ end
+ end
+ if not found then
+ blk = "*"
+ tag = f_destination(dataset,blk,rest)
+ found = subset[tag]
+ end
+ if found then
+ local current = tonumber(found.entries and found.entries.text) -- tonumber needed
+ if current and not done[current] then
+ nofresult = nofresult + 1
+ result[nofresult] = { blk, rest, current }
+ done[current] = true
+ end
+ end
+ end
+ end
+ local first, last, firsti, lasti, firstr, lastr
+ local collected, nofcollected = { }, 0
+ for i=1,nofresult do
+ local r = result[i]
+ local current = r[3]
+ if not first then
+ first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
+ elseif current == last + 1 then
+ last, lasti, lastr = current, i, r
+ else
+ if last > first + 1 then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = { firstr, lastr }
+ else
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = firstr
+ if last > first then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = lastr
+ end
+ end
+ first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
+ end
+ end
+ if first and last then
+ if last > first + 1 then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = { firstr, lastr }
+ else
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = firstr
+ if last > first then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = lastr
+ end
+ end
+ end
+ if nofcollected > 0 then
+-- inspect(reference)
+-- inspect(result)
+-- inspect(collected)
+ for i=1,nofcollected do
+ local c = collected[i]
+ if i == nofcollected then
+ ctx_btxlistvariantparameter("lastpubsep")
+ elseif i > 1 then
+ ctx_btxlistvariantparameter("pubsep")
+ end
+                if #c == 3 then -- a single entry, stored as a { block, tag, number } triple
+                    ctx_btxdirectlink(f_reference(dataset,c[1],c[2]),c[3])
+                else -- a range, stored as { first, last } with two such triples
+                    local f, l = c[1], c[2]
+                    ctx_btxdirectlink(f_reference(dataset,f[1],f[2]),f[3])
+                    context.endash() -- to do
+                    ctx_btxdirectlink(f_reference(dataset,l[1],l[2]),l[3])
+                end
+ end
+ else
+ context("[btx error 1]")
+ end
+ else
+ context("[btx error 2]")
+ end
+end
+
+local done = { }
+
+function commands.btxreference(dataset,block,tag,data)
+ local ref = f_reference(dataset,block,tag)
+ if not done[ref] then
+ done[ref] = true
+-- context("<%s>",data)
+ ctx_dodirectfullreference(ref,data)
+ end
+end
+
+local done = { }
+
+function commands.btxdestination(dataset,block,tag,data)
+ local ref = f_destination(dataset,block,tag)
+ if not done[ref] then
+ done[ref] = true
+-- context("<<%s>>",data)
+ ctx_dodirectfullreference(ref,data)
+ end
+end
+
+commands.btxsetlistmethod = lists.setmethod
+commands.btxresolvelistreference = lists.resolve
+commands.btxregisterlistentry = lists.registerplaced
+commands.btxaddtolist = lists.addentry
+commands.btxcollectlistentries = lists.collectentries
+commands.btxfetchlistentries = lists.fetchentries
+commands.btxflushlistentries = lists.flushentries
+commands.btxdoifelselistentryplaced = lists.doifalreadyplaced
+
+local citevariants = { }
+publications.citevariants = citevariants
+
+-- helper
+
+local function sortedtags(dataset,list,sorttype)
+ local luadata = datasets[dataset].luadata
+ local valid = { }
+ for i=1,#list do
+ local tag = list[i]
+ local entry = luadata[tag]
+ if entry then
+ local key = entry[sorttype]
+ if key then
+ valid[#valid+1] = {
+ tag = tag,
+ split = sortsplitter(sortstripper(key))
+ }
+ else
+ end
+ end
+ end
+ if #valid == 0 or #valid ~= #list then
+ return list
+ else
+ sorters.sort(valid,basicsorter)
+ for i=1,#valid do
+ valid[i] = valid[i].tag
+ end
+ return valid
+ end
+end
+
+-- todo: standard : current
+
+local prefixsplitter = lpeg.splitat("::")
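+-- a reference like "standard::knuth1984" (hypothetical tag) splits into the prefix
+-- "standard" and the rest "knuth1984"; without "::" rest stays nil and the current
+-- dataset is kept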
+
+function commands.btxhandlecite(dataset,tag,mark,variant,sorttype,setup) -- variant for tracing
+ local prefix, rest = lpegmatch(prefixsplitter,tag)
+ if rest then
+ dataset = prefix
+ else
+ rest = tag
+ end
+ ctx_setvalue("currentbtxdataset",dataset)
+ local tags = settings_to_array(rest)
+ if #tags > 0 then
+ if sorttype and sorttype ~= "" then
+ tags = sortedtags(dataset,tags,sorttype)
+ end
+ ctx_btxcitevariantparameter(v_left)
+ for i=1,#tags do
+ local tag = tags[i]
+ ctx_setvalue("currentbtxtag",tag)
+ if i > 1 then
+ ctx_btxcitevariantparameter(v_middle)
+ end
+ if mark ~= false then
+ ctx_btxdomarkcitation(dataset,tag)
+ end
+ ctx_directsetup(setup) -- cite can become alternative
+ end
+ ctx_btxcitevariantparameter(v_right)
+ else
+ -- error
+ end
+end
+
+function commands.btxhandlenocite(dataset,tag,mark)
+ if mark ~= false then
+ local prefix, rest = lpegmatch(prefixsplitter,tag)
+ if rest then
+ dataset = prefix
+ else
+ rest = tag
+ end
+ ctx_setvalue("currentbtxdataset",dataset)
+ local tags = settings_to_array(rest)
+ for i=1,#tags do
+ ctx_btxdomarkcitation(dataset,tags[i])
+ end
+ end
+end
+
+function commands.btxcitevariant(dataset,block,tags,variant)
+ local action = citevariants[variant] or citevariants.default
+ if action then
+ action(dataset,tags,variant)
+ end
+end
+
+function citevariants.default(dataset,tags,variant)
+ local content = getfield(dataset,tags,variant)
+ if content then
+ context(content)
+ end
+end
+
+-- todo : sort
+-- todo : choose between publications or commands namespace
+-- todo : use details.author
+-- todo : sort details.author
+
+local function collectauthoryears(dataset,tags)
+ local luadata = datasets[dataset].luadata
+ local list = settings_to_array(tags)
+ local found = { }
+ local result = { }
+ local order = { }
+ for i=1,#list do
+ local tag = list[i]
+ local entry = luadata[tag]
+ if entry then
+ local year = entry.year
+ local author = entry.author
+ if author and year then
+ local a = found[author]
+ if not a then
+ a = { }
+ found[author] = a
+ order[#order+1] = author
+ end
+ local y = a[year]
+ if not y then
+ y = { }
+ a[year] = y
+ end
+ y[#y+1] = tag
+ end
+ end
+ end
+ -- found = { author = { year_1 = { e1, e2, e3 } } }
+ for i=1,#order do
+ local author = order[i]
+ local years = found[author]
+ local yrs = { }
+ for year, entries in next, years do
+ if subyears then
+ -- -- add letters to all entries of an author and if so shouldn't
+ -- -- we tag all years of an author as soon as we do this?
+ -- if #entries > 1 then
+ -- for i=1,#years do
+ -- local entry = years[i]
+ -- -- years[i] = year .. string.char(i + string.byte("0") - 1)
+ -- end
+ -- end
+ else
+ yrs[#yrs+1] = year
+ end
+ end
+ result[i] = { author = author, years = yrs }
+ end
+ return result, order
+end
+
+-- (name, name and name) .. how names? how sorted?
+-- todo: we loop at the tex end .. why not here
+-- \cite[{hh,afo},kvm]
+
+-- maybe we will move this to the tex end anyway
+
+function citevariants.author(dataset,tags)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ publications.citeconcat(order)
+end
+
+local function authorandyear(dataset,tags,formatter)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ for i=1,#result do
+ local r = result[i]
+ order[i] = formatter(r.author,r.years) -- reuse order
+ end
+ publications.citeconcat(order)
+end
+
+function citevariants.authoryear(dataset,tags)
+ authorandyear(dataset,tags,formatters["%s (%, t)"])
+end
+
+function citevariants.authoryears(dataset,tags)
+ authorandyear(dataset,tags,formatters["%s, %, t"])
+end
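+-- assuming that the "%, t" directive concatenates a table with ", " (as the context
+-- string formatters do), authoryear turns ("Knuth",{ "1984", "1986" }) into
+-- "Knuth (1984, 1986)" and authoryears into "Knuth, 1984, 1986" (hypothetical data)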
+
+function citevariants.authornum(dataset,tags)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ publications.citeconcat(order)
+ ctx_btxcitevariantparameter(v_inbetween)
+ lists.resolve(dataset,tags) -- left/right ?
+end
+
+-- function citevariants.short(dataset,tags)
+-- local short = getdetail(dataset,tags,"short")
+-- if short then
+-- context(short)
+-- end
+-- end
+
+function citevariants.short(dataset,tags)
+ local short = getdetail(dataset,tags,"short")
+ local suffix = getdetail(dataset,tags,"suffix")
+ if suffix then
+ context(short .. suffix)
+ elseif short then
+ context(short)
+ end
+end
+
+function citevariants.page(dataset,tags)
+ local pages = getdetail(dataset,tags,"pages")
+ if not pages then
+ -- nothing
+ elseif type(pages) == "table" then
+ context(pages[1])
+ ctx_btxcitevariantparameter(v_inbetween)
+ context(pages[2])
+ else
+ context(pages)
+ end
+end
+
+function citevariants.num(dataset,tags)
+-- ctx_btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
+ lists.resolve(dataset,tags)
+end
+
+function citevariants.serial(dataset,tags) -- the traditional fieldname is "serial" and not "index"
+ local index = getfield(dataset,tags,"index")
+ if index then
+ context(index)
+ end
+end
+
+-- List variants
+
+local listvariants = { }
+publications.listvariants = listvariants
+
+-- function commands.btxhandlelist(dataset,block,tag,variant,setup)
+-- if sorttype and sorttype ~= "" then
+-- tags = sortedtags(dataset,tags,sorttype)
+-- end
+-- ctx_setvalue("currentbtxtag",tag)
+-- ctx_btxlistvariantparameter(v_left)
+-- ctx_directsetup(setup)
+-- ctx_btxlistvariantparameter(v_right)
+-- end
+
+function commands.btxlistvariant(dataset,block,tags,variant,listindex)
+ local action = listvariants[variant] or listvariants.default
+ if action then
+ action(dataset,block,tags,variant,tonumber(listindex) or 0)
+ end
+end
+
+function listvariants.default(dataset,block,tags,variant)
+ context("?")
+end
+
+function listvariants.num(dataset,block,tags,variant,listindex)
+ ctx_btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
+end
+
+-- function listvariants.short(dataset,block,tags,variant,listindex)
+-- local short = getdetail(dataset,tags,variant,variant)
+-- if short then
+-- context(short)
+-- end
+-- end
+
+function listvariants.short(dataset,block,tags,variant,listindex)
+ local short = getdetail(dataset,tags,"short","short")
+ local suffix = getdetail(dataset,tags,"suffix","suffix")
+ if suffix then
+ context(short .. suffix)
+ elseif short then
+ context(short)
+ end
+end
diff --git a/Master/texmf-dist/tex/context/base/publ-ini.mkiv b/Master/texmf-dist/tex/context/base/publ-ini.mkiv
new file mode 100644
index 00000000000..adbf8f7fcf0
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-ini.mkiv
@@ -0,0 +1,963 @@
+%D \module
+%D [ file=publ-ini,
+%D version=2013.05.12,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
+% todo: \v!cite => \s!cite
+% todo: interface with (ml)bibtex (export -> call -> import)
+% todo: check if 'all' etc are ok ... either use list or use other criterium
+
+% \definecolor[btx:field] [darkred]
+% \definecolor[btx:crossref][darkblue]
+% \definecolor[btx:key] [darkgreen]
+% \definecolor[btx:todo] [darkyellow]
+
+%D We operate on several axes:
+%D
+%D \startitemize[packed]
+%D \startitem we can have several databases (or combinations) \stopitem
+%D \startitem we can add entries to them if needed (coded in tex) \stopitem
+%D \startitem we can have several lists each using one of the databases \stopitem
+%D \startitem we can render each list or citation independently \stopitem
+%D \stopitemize
+%D
+%D We assume that the rendering of a list entry is consistent in a document,
+%D although one can redefine properties if needed. Adding more granularity would
+%D complicate the user interface beyond comprehension.
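+%D
+%D A minimal, hypothetical usage sketch (the file and tag names are placeholders):
+%D
+%D \starttyping
+%D \usebtxdataset[standard][mybibs.bib]
+%D
+%D \starttext
+%D     We cite \cite[authoryear][knuth1984] here.
+%D     \completebtxrendering[standard]
+%D \stoptext
+%D \stoptyping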
+
+\writestatus{loading}{ConTeXt Publication Support / Initialization}
+
+\registerctxluafile{publ-dat}{1.001}
+\registerctxluafile{publ-aut}{1.001}
+\registerctxluafile{publ-usr}{1.001}
+\registerctxluafile{publ-ini}{1.001}
+\registerctxluafile{publ-oth}{1.001} % this could become an option
+
+\unprotect
+
+\def\s!btx{btx}
+\def\v!btxlist{btxlist}
+
+% a dedicated construction mechanism
+
+\installcorenamespace {btxlist}
+
+\installcommandhandler \??btxlist {btxlist} \??btxlist
+
+\unexpanded\setvalue{\??constructioninitializer\v!btxlist}%
+ {\let\currentbtxlist \currentconstruction
+ \let\constructionparameter \btxlistparameter
+ \let\detokenizedconstructionparameter\detokenizedbtxlistparameter
+ \let\letconstructionparameter \letbtxlistparameter
+ \let\useconstructionstyleandcolor \usebtxliststyleandcolor
+ \let\setupcurrentconstruction \setupcurrentbtxlist}
+
+\expandafter\let\csname\??constructionmainhandler \v!btxlist\expandafter\endcsname\csname\??constructionmainhandler \v!construction\endcsname
+\expandafter\let\csname\??constructioncommandhandler\v!btxlist\expandafter\endcsname\csname\??constructioncommandhandler\v!construction\endcsname
+\expandafter\let\csname\??constructiontexthandler \v!btxlist\expandafter\endcsname\csname\??constructiontexthandler \v!construction\endcsname
+
+\unexpanded\setvalue{\??constructioncommandhandler\v!btxlist}%
+ {\csname\??constructionstarthandler\v!construction\endcsname
+ \csname\??constructionstophandler \v!construction\endcsname
+ \endgroup}
+
+\unexpanded\setvalue{\??constructionstarthandler\v!btxlist}%
+ {\csname\??constructionstarthandler\v!construction\endcsname}
+
+\unexpanded\setvalue{\??constructionstophandler\v!btxlist}%
+ {\csname\??constructionstophandler\v!construction\endcsname
+ \endgroup}
+
+\unexpanded\def\startbtxlistentry#1%
+ {\begingroup
+ \strc_constructions_initialize{#1}%
+ \csname\??constructionstarthandler\currentconstructionhandler\endcsname}
+
+\unexpanded\def\stopbtxlistentry
+ {\csname\??constructionstophandler\currentconstructionhandler\endcsname}
+
+\unexpanded\setvalue{\??constructiontexthandler\v!btxlist}%
+ {\begingroup
+ \useconstructionstyleandcolor\c!headstyle\c!headcolor % move to \currentconstructiontext
+ \the\everyconstruction
+ \constructionparameter\c!headcommand
+ {\strut
+ \constructionparameter\c!text
+ \btx_reference_inject}%
+ \endgroup}
+
+\unexpanded\def\strc_constructions_initialize#1% class instance
+ {\edef\currentconstruction{#1}%
+ \let\currentconstructionlistentry\!!zerocount
+ \expandafter\let\expandafter\currentconstructionmain \csname\??constructionmain \currentconstruction\endcsname
+ \expandafter\let\expandafter\currentconstructionlevel \csname\??constructionlevel\currentconstruction\endcsname
+ \expandafter\let\expandafter\currentconstructionhandler\csname\??constructionclass\currentconstruction\endcsname
+ \csname\??constructioninitializer\currentconstructionhandler\endcsname}
+
+\appendtoks
+ % \ifx\currentbtxlistparent\empty
+ % \defineconstruction[\currentbtxlist][\currentbtxlistparent][\s!handler=\v!btxlist,\c!level=1]%
+ % \else
+ % \defineconstruction[\currentbtxlist][\s!handler=\v!btxlist,\c!level=1]%
+ % \fi
+ \ifx\currentbtxlistparent\empty
+ \letvalue{\??constructionmain\currentbtxlist}\currentbtxlist
+ \else
+ \letvalue{\??constructionmain\currentbtxlist}\currentbtxlistparent
+ \fi
+ \setevalue{\??constructionlevel\currentbtxlist}{\number\btxlistparameter\c!level}%
+ \setevalue{\??constructionclass\currentbtxlist}{\btxlistparameter\s!handler}%
+\to \everydefinebtxlist
+
+\setupbtxlist
+ [\s!handler=\v!btxlist,
+ \c!level=1]
+
+\setupbtxlist
+ [\c!alternative=\v!left,
+ \c!headstyle=,
+ \c!titlestyle=,
+ %\c!style=,
+ %\c!color=,
+ %\c!headcolor=,
+ %\c!titlecolor=,
+ \c!width=4\emwidth,
+ \c!distance=\emwidth,
+ %\c!titledistance=.5\emwidth,
+ %\c!hang=,
+ %\c!sample=,
+ %\c!align=,
+ %\c!headalign=,
+ \c!margin=\v!no,
+ \c!before=\blank,
+ \c!inbetween=\blank,
+ \c!after=\blank,
+ %\c!indentnext=\v!yes,
+ %\c!indenting=\v!never,
+ %\c!titleleft=(,
+ %\c!titleright=),
+ %\c!closesymbol=,
+ %\c!closecommand=\wordright,
+ \c!display=\v!yes,
+ \c!command=,
+ %\c!titlecommand=,
+ %\c!expansion=\v!no,
+ %\c!xmlsetup=,
+ %\s!catcodes=,
+ %\c!title=\v!yes,
+ %\c!text=,
+ ]
+
+% here starts the bib stuff
+
+\installcorenamespace {btxdataset}
+\installcorenamespace {btxlistvariant}
+\installcorenamespace {btxcitevariant}
+\installcorenamespace {btxrendering}
+\installcorenamespace {btxcommand}
+\installcorenamespace {btxnumbering}
+
+\installcommandhandler \??btxdataset {btxdataset} \??btxdataset
+\installcommandhandler \??btxlistvariant {btxlistvariant} \??btxlistvariant
+\installcommandhandler \??btxcitevariant {btxcitevariant} \??btxcitevariant
+\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
+
+\unexpanded\def\usebtxdataset
+ {\dodoubleargument\publ_use_dataset}
+
+\def\publ_use_dataset[#1][#2]%
+ {\ifsecondargument
+ \ctxcommand{usebtxdataset("#1","#2")}%
+ \else
+ \ctxcommand{usebtxdataset("\v!standard","#1")}%
+ \fi}
+
+\definebtxdataset
+ [\v!standard]
+
+% \usebtxdataset
+% [standard]
+% [mybibs.bib]
+
+\unexpanded\def\startpublication
+ {\dodoubleempty\publ_set_publication}
+
+\let\stoppublication\relax
+
+\def\publ_set_publication[#1][#2]%
+ {\begingroup
+ \catcode\commentasciicode\othercatcode
+ \ifsecondargument
+ \expandafter\publ_set_publication_indeed
+ \else\iffirstargument
+ \doubleexpandafter\publ_set_publication_checked
+ \else
+ \doubleexpandafter\publ_set_publication_default
+ \fi\fi{#1}{#2}}
+
+\def\publ_set_publication_default#1#2%
+ {\publ_set_publication_indeed\v!standard{#1}}
+
+\def\publ_set_publication_checked#1#2%
+ {\doifassignmentelse{#1}
+ {\publ_set_publication_indeed\v!standard{#1}}
+ {\publ_set_publication_indeed{#1}{}}}
+
+\def\publ_set_publication_indeed#1#2#3\stoppublication
+ {\ctxcommand{addbtxentry("#1",\!!bs#2\!!es,\!!bs\detokenize{#3}\!!es)}%
+ \endgroup
+ \ignorespaces}
+
+% commands
+
+\unexpanded\def\btxcommand#1%
+ {\ifcsname\??btxcommand#1\endcsname
+ \expandafter\publ_command_yes
+ \else
+ \expandafter\publ_command_nop
+ \fi{#1}}
+
+\let\btxcmd\btxcommand
+
+\def\publ_command_yes#1%
+ {\csname\??btxcommand#1\endcsname}
+
+\def\publ_command_nop#1%
+ {\ifcsname#1\endcsname
+ \writestatus\m!publications{unknown command: #1, using built-in context variant #1}%
+ %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname#1\endcsname}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname#1\endcsname
+ \else\ifcsname\utfupper{#1}\endcsname
+ \writestatus\m!publications{unknown command: #1, using built-in context variant \utfupper{#1}}%
+ %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname\utfupper{#1}\endcsname}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname\utfupper{#1}\endcsname
+ \else
+ \writestatus\m!publications{unknown command: #1}%
+ \setugvalue{\??btxcommand#1}{\underbar{\tttf#1}}%
+ \fi\fi
+ \publ_command_yes{#1}}
+
+\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
+ {\setuvalue{\??btxcommand\strippedcsname#1}}%
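+
+% for instance (a hypothetical definition of a command that may show up in entries):
+%
+% \definebtxcommand\MF {MetaFont}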
+
+% access
+
+\let\currentbtxtag \empty
+\let\currentbtxdataset\v!standard
+
+\unexpanded\def\setbtxdataset[#1]%
+ {\edef\currentbtxdataset{\ctxcommand{setbtxdataset("#1")}}}
+
+\unexpanded\def\setbtxentry[#1]%
+ {\edef\currentbtxtag{\ctxcommand{setbtxentry("\currentbtxdataset","#1")}}}
+
+% \let\btxsetdataset\setbtxdataset
+% \let\btxsetentry \setbtxentry
+
+\def\btxfield #1{\ctxcommand{btxfield("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdetail #1{\ctxcommand{btxdetail("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxflush #1{\ctxcommand{btxflush("\currentbtxdataset","\currentbtxtag","#1")}}
+%def\btxrendering#1{\ctxcommand{btxrendering("\currentbtxdataset","\currentbtxtag","#1","\btxrenderingparameter\c!interaction")}}
+\def\btxdoifelse #1{\ctxcommand{btxdoifelse("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdoif #1{\ctxcommand{btxdoif("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdoifnot #1{\ctxcommand{btxdoifnot("\currentbtxdataset","\currentbtxtag","#1")}}
+
+\let\btxsetup \directsetup
+
+%D How complex will we go? Can we assume that e.g. an apa style will not be mixed
+%D with another one? I think this assumption is okay. For manuals we might want to
+%D mix but we can work around it.
+
+%D Rendering.
+
+\unexpanded\def\btxspace {\removeunwantedspaces\space}
+\unexpanded\def\btxperiod {\removeunwantedspaces.\space}
+\unexpanded\def\btxcomma {\removeunwantedspaces,\space}
+\unexpanded\def\btxlparent {\removeunwantedspaces\space(}
+\unexpanded\def\btxrparent {\removeunwantedspaces)\space}
+\unexpanded\def\btxlbracket{\removeunwantedspaces\space[}
+\unexpanded\def\btxrbracket{\removeunwantedspaces]\space}
+
+%D Rendering lists and citations.
+
+\newconditional\c_btx_trace
+
+\installtextracker
+ {btxrendering}
+ {\settrue \c_btx_trace}
+ {\setfalse\c_btx_trace}
+
+\unexpanded\def\startbtxrendering
+ {\begingroup
+ \dosingleempty\btx_start_rendering}
+
+\def\btx_start_rendering[#1]%
+ {\edef\currentbtxrendering{#1}}
+
+\unexpanded\def\stopbtxrendering
+ {\endgroup}
+
+\unexpanded\def\btxtodo#1%
+ {[#1]}
+
+%D Specific rendering definitions (like apa):
+
+\unexpanded\def\loadbtxdefinitionfile[#1]%
+ {\ctxcommand{loadbtxdefinitionfile("#1")}}
+
+%D Lists:
+
+\newdimen\d_publ_number_width
+%newdimen\d_publ_number_distance
+
+\ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone
+\ifdefined\btxcounter \else \newcount\btxcounter \fi
+
+\newtoks \everysetupbtxlistplacement % name will change
+\newtoks \everysetupbtxciteplacement % name will change
+
+% \def\publ_list_processor % bibref -> btx (old method, keep as reference)
+% {\ctxcommand{btxaddtolist("\currentbtxrendering",\currentlistindex,"btxref")}}
+
+\definelist % only used for selecting
+ [btx]
+
+\setuplist
+ [btx]%
+ [\c!state=\v!start]%
+
+\appendtoks
+ \ifx\currentbtxrenderingparent\empty
+ \definebtxlist
+ [\currentbtxrendering]%
+ \else
+ \definebtxlist
+ [\currentbtxrendering]%
+ [\currentbtxrenderingparent]%
+ \fi
+\to \everydefinebtxrendering
+
+\unexpanded\def\btx_entry_inject
+ {\begingroup
+ \edef\currentbtxcategory{\btxfield{category}}%
+ \ignorespaces
+ \directsetup{\s!btx:\currentbtxalternative:\currentbtxcategory}%
+ \removeunwantedspaces
+ \endgroup}
+
+\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
+\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
+
+\let\completelistofpublications\completebtxrendering
+\let\placelistofpublications \placebtxrendering
+
+\def\publ_place_list_check_criterium
+ {\edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}% \v!cite will become \s!cite
+ \ifx\currentbtxcriterium\empty
+ \let\currentbtxcriterium\v!previous
+ \else\ifx\currentbtxcriterium\v!cite
+ \let\currentbtxcriterium\v!here
+ \fi\fi}
+
+\def\publ_place_list_complete[#1][#2]% title might become obsolete, just headtext
+ {\begingroup
+ \edef\currentbtxrendering{#1}%
+ \setupcurrentbtxrendering[#2]%
+ \let\currentlist\s!btx
+ \let\currentbtxlist\currentbtxrendering
+ \publ_place_list_check_criterium
+ \edef\currentbtxrenderingtitle{\btxrenderingparameter\c!title}%
+ \ifx\currentbtxrenderingtitle\empty
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\headtext{\currentbtxrendering}}]}%
+ \else
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\currentbtxrenderingtitle}]}%
+ \fi
+ \publ_place_list_indeed
+ \stopnamedsection
+ \endgroup}
+
+\def\publ_place_list_standard[#1][#2]%
+ {\begingroup
+ \edef\currentbtxrendering{#1}%
+ \setupcurrentbtxrendering[#2]%
+ \let\currentlist\s!btx
+ \let\currentbtxlist\currentbtxrendering
+ \publ_place_list_check_criterium
+ \publ_place_list_indeed
+ \endgroup}
+
+\newconditional\c_publ_place_all
+\newconditional\c_publ_place_register % to be interfaced
+\newconditional\c_publ_place_check % to be interfaced
+
+\appendtoks
+ \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
+ \settrue\c_publ_place_all
+ \else
+ \setfalse\c_publ_place_all
+ \fi
+\to \everysetupbtxlistplacement
+
+\def\publ_place_list_indeed
+ {\startbtxrendering[\currentbtxrendering]%
+ \directsetup{\btxrenderingparameter\c!setups}%
+ % \determinelistcharacteristics[\currentbtxrendering]%
+ \edef\currentbtxalternative{\btxrenderingparameter\c!alternative}%
+ \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
+ \let\currentlist\s!btx
+ \let\currentbtxlist\currentbtxrendering
+ \the\everysetupbtxlistplacement
+ \forgetall
+ \ctxcommand{btxsetlistmethod("\currentbtxdataset","\btxrenderingparameter\c!method")}%
+ \startpacked[\v!blank]%
+ % here we just collect items
+ \ctxcommand{btxcollectlistentries {
+ names = "btx",
+ criterium = "\currentbtxcriterium",
+ number = "\btxrenderingparameter\c!number",
+ btxdataset = "\currentbtxdataset",
+ keyword = "\btxrenderingparameter\c!keyword",
+ }}%
+ % next we analyze the width
+ \ifx\btx_reference_inject_indeed\relax \else
+ \edef\p_width{\btxrenderingparameter\c!width}%
+ \ifx\p_width\v!auto
+ \scratchcounter\btxcounter
+ \setbox\scratchbox\vbox{\settrialtypesetting\ctxcommand{btxfetchlistentries("\currentbtxdataset")}}%
+ \d_publ_number_width\wd\scratchbox
+ \global\btxcounter\scratchcounter
+ \letbtxlistparameter\c!width\d_publ_number_width
+ \fi
+ \fi
+ % this actually typesets them
+ \ctxcommand{btxflushlistentries("\currentbtxdataset","\btxrenderingparameter\c!sorttype")}%
+ \stoppacked
+ \stopbtxrendering
+ \global\advance\btxblock\plusone}
+
+\def\currentbtxblock{\number\btxblock}
+
+\def\publ_place_list_entry_checked
+ {\ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_place_list_entry}
+
+\def\publ_place_list_entry_register
+ {\ctxcommand{btxregisterlistentry("\currentbtxdataset","\currentbtxtag")}}
+
+\unexpanded\def\btxhandlelistentry#1% called at the lua end
+ {\begingroup
+ \edef\currentbtxtag{#1}%
+ \ifconditional\c_publ_place_all
+ \publ_place_list_entry
+ \else\ifconditional\c_publ_place_check
+ \publ_place_list_entry_checked
+ \else
+ \publ_place_list_entry
+ \fi\fi
+ \endgroup}
+
+\unexpanded\def\publ_place_list_entry
+ {\global\advance\btxcounter\plusone
+ \ifconditional\c_publ_place_register
+ \publ_place_list_entry_register
+ \fi
+ \let\currentlist\s!btx
+ \startbtxlistentry\currentbtxrendering
+ \btx_entry_inject
+ \stopbtxlistentry}
+
+\unexpanded\def\btxchecklistentry#1% called at the lua end
+ {\begingroup
+ \edef\currentbtxtag{#1}%
+ \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
+ \publ_check_list_entry
+ \else
+ \ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_check_list_entry
+ \fi
+ \endgroup}
+
+\unexpanded\def\publ_check_list_entry
+ {\global\advance\btxcounter\plusone
+ % todo, switch to font
+ \hbox{\btx_reference_checked}%
+ \par}
+
+\unexpanded\def\btx_reference_inject % we can use a faster \reference
+ {\dontleavehmode\begingroup % no box
+ \iftrialtypesetting\else
+ \ctxcommand{btxdestination("\currentbtxdataset","\currentbtxblock","\currentbtxtag","\number\btxcounter")}%
+ \fi
+ \btx_reference_inject_indeed
+ \endgroup}
+
+\unexpanded\def\btx_reference_checked
+ {\dontleavehmode\hbox\bgroup
+ \btx_reference_inject_indeed
+ \egroup}
+
+\setuvalue{\??btxnumbering\v!short }{\btxlistvariant{short}} % these will be setups
+\setuvalue{\??btxnumbering\v!bib }{\btxlistvariant{num}} % these will be setups
+\setuvalue{\??btxnumbering\s!unknown}{\btxlistvariant{num}} % these will be setups
+\setuvalue{\??btxnumbering\v!yes }{\btxlistvariant{num}} % these will be setups
+
+\appendtoks
+ \edef\p_btx_numbering{\btxrenderingparameter\c!numbering}%
+ \letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
+ \ifx\p_btx_numbering\v!no
+ \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
+ \letlistparameter\c!symbol \v!none
+ \letlistparameter\c!aligntitle \v!yes
+ \let\btx_reference_inject_indeed\relax
+ \else
+ \ifcsname\??btxnumbering\p_btx_numbering\endcsname \else
+ \let\p_btx_numbering\s!unknown
+ \fi
+ \letlistparameter\c!headnumber\v!always
+ \expandafter\let\expandafter\btx_reference_inject_indeed\csname\??btxnumbering\p_btx_numbering\endcsname
+ \fi
+\to \everysetupbtxlistplacement
+
+% \appendtoks
+% \edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}%
+% \to \everysetupbtxlistplacement
+
+\unexpanded\def\btxflushauthor
+ {\doifnextoptionalcselse\btx_flush_author_yes\btx_flush_author_nop}
+
+\def\btx_flush_author_yes[#1]{\btx_flush_author{#1}}
+\def\btx_flush_author_nop {\btx_flush_author{\btxlistvariantparameter\c!author}}
+
+\unexpanded\def\btx_flush_author#1#2%
+ {\edef\currentbtxfield{#2}%
+ \let\currentbtxlistvariant\currentbtxfield
+ \ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","\currentbtxfield",{
+ combiner = "#1",
+ etallimit = \number\btxlistvariantparameter\c!etallimit,
+ etaldisplay = \number\btxlistvariantparameter\c!etaldisplay,
+ })}}
+
+\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1
+\unexpanded\def\btxflushauthornormalshort {\btx_flush_author{normalshort}} % #1
+\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
+\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
+
+% \btxflushauthor{author}
+% \btxflushauthor{artauthor}
+% \btxflushauthor{editor}
+%
+% \btxflushauthor[normal]{author}
+% \btxflushauthor[normalshort]{author}
+% \btxflushauthor[inverted]{author}
+% \btxflushauthor[invertedshort]{author}
+
+% Interaction
+
+\newconditional\btxinteractive
+
+\unexpanded\def\btxdoifelseinteraction
+ {\iflocation
+ \edef\p_interaction{\btxcitevariantparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \doubleexpandafter\secondoftwoarguments
+ \else
+ \doubleexpandafter\firstoftwoarguments
+ \fi
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\appendtoks
+ \iflocation
+ \edef\p_interaction{\btxlistvariantparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \let\doifelsebtxinteractionelse\secondoftwoarguments
+ \setfalse\btxinteractive
+ \else
+ \let\doifelsebtxinteractionelse\firstoftwoarguments
+ \settrue\btxinteractive
+ \fi
+ \else
+ \let\doifelsebtxinteractionelse\secondoftwoarguments
+ \setfalse\btxinteractive
+ \fi
+\to \everysetupbtxlistplacement
+
+% bib -> btx
+
+\unexpanded\def\btxgotolink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
+\unexpanded\def\btxatlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
+\unexpanded\def\btxinlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
+
+\unexpanded\def\btxdirectlink#1#2{\goto{#2 {\tttf[#1]}}[#1]}
+\unexpanded\def\btxdirectlink#1#2{\goto{#2}[#1]}
+
+\let\gotobiblink\btxgotolink
+\let\atbiblink \btxatlink
+\let\inbiblink \btxinlink
+
+\unexpanded\def\btxnumberedreference[#1]% \bibtexnumref (replaced by \cite[num])
+ {\dontleavehmode
+ \begingroup
+ \btxcitevariantparameter\v!left
+ \penalty\plustenthousand % todo
+ \ctxcommand{btxresolvelistreference("\currentbtxdataset","#1")}% todo: split dataset from #1, so another call
+ \btxcitevariantparameter\v!right
+ \endgroup}
+
+% \def\btxnumberedplaceholder[#1]% \nobibtexnumref
+% {[#1]}
+
+\appendtoks
+ % for old times sake, for a while at least
+ \let\maybeyear\gobbleoneargument
+ \let\noopsort \gobbleoneargument
+\to \everysetupbtxlistplacement
+
+\appendtoks
+ % for old times sake, for a while at least
+ \let\maybeyear\gobbleoneargument
+ \let\noopsort \gobbleoneargument
+\to \everysetupbtxciteplacement
+
+\appendtoks
+ \doifnot{\btxrenderingparameter\c!continue}\v!yes
+ {\global\btxcounter\zerocount}%
+\to \everysetupbtxlistplacement
+
+%D When a publication is cited, we need to signal that somehow. This is done with the
+%D following (non||user) command. We could tag without injecting a node, but this way
+%D we also store the location, which makes it possible to ask for local lists.
+
+\newconditional\c_publ_cite_write
+
+% for reference, but split now done at the lua end
+%
+% \def\publ_cite_write#1% not used
+% {\splitstring#1\at::\to\askedbtxrendering\and\askedbtxtag
+% \ifx\askedbtxtag\empty
+% \let\currentbtxtag \askedbtxrendering
+% \else
+% \let\currentbtxtag \askedbtxtag
+% \let\currentbtxrendering\askedbtxrendering
+% \fi
+% \iftrialtypesetting \else
+% \processcommacommand[\currentbtxtag]{\publ_cite_indeed\currentbtxrendering}%
+% \fi}
+
+\def\publ_cite_indeed#1#2%
+ {\expanded{\writedatatolist[btx][btxset=#1,btxref=#2]}}
+
+\def\btxdomarkcitation#1#2% called from lua end
+ {\iftrialtypesetting \else
+ \writedatatolist[btx][btxset=#1,btxref=#2]% \c!location=\v!here
+ \fi}
+
+%D \macros{cite,nocite,citation,nocitation,usecitation}
+%D
+%D The inline \type {\cite} command creates an (often) short reference to a publication
+%D and for historic reasons uses a strict test for brackets. This means, at least
+%D in the default case, that spaces are ignored in the argument scanner. The \type
+%D {\citation} command is more liberal but also gobbles following spaces. Both
+%D commands insert a reference as well as a visual clue.
+%D
+%D The \type {no} commands all do the same (they are synonyms): they make sure that
+%D a reference is injected but show nothing. However, they do create a node, so it is
+%D best to attach them to some text in order to avoid spacing interference. A slightly
+%D less efficient alternative is \type {\cite[none][tag]}.
+
+% [tags]
+% [settings|variant][tags]
+% [base::tags]
+% [settings|variant][base::tags]
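+%
+% for instance (with hypothetical tags):
+%
+% \cite[knuth1984]
+% \cite[authoryear][knuth1984,hagen2010]
+% \cite[alternative=authoryear,left={(},right={)}][knuth1984]
+% \cite[standard::knuth1984]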
+
+\unexpanded\def\btxcite
+ {\dontleavehmode
+ \begingroup
+ \strictdoifnextoptionalelse\publ_cite_tags_options\publ_cite_tags_indeed}
+
+\unexpanded\def\publ_cite_tags_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitevariant{\btxcitevariantparameter\c!alternative}%
+ \edef\currentbtxcitetag{#1}%
+ \publ_cite_variant
+ \endgroup}
+
+\let\publ_citation_tags_indeed\publ_cite_tags_indeed
+
+\unexpanded\def\publ_cite_tags_options[#1]%
+ {\strictdoifnextoptionalelse{\publ_cite_tags_options_indeed{#1}}{\publ_cite_tags_indeed{#1}}}
+
+\unexpanded\def\publ_cite_tags_options_indeed#1[#2]%
+ {\edef\currentbtxcitetag{#2}%
+ \doifassignmentelse{#1}
+ {\publ_cite_tags_settings_indeed{#1}}
+ {\publ_cite_tags_variants_indeed{#1}}}
+
+\def\publ_cite_tags_settings_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ %\letinteractionparameter\c!color\empty
+ \getdummyparameters[\c!alternative=,\c!extras=,#1]%
+ \edef\p_alternative{\dummyparameter\c!alternative}%
+ \ifx\p_alternative\empty \else
+ \let\currentbtxcitevariant\p_alternative
+ \fi
+ \setupcurrentbtxcitevariantparameters[#1]%
+ \edef\p_extras{\dummyparameter\c!extras}%
+ \ifx\p_extras\empty \else
+ \edef\p_right{\btxcitevariantparameter\c!right}%
+ \ifx\p_right\empty \else
+ \setexpandedbtxcitevariantparameter\p_right{\p_extras\p_right}%
+ \fi
+ \fi
+ \publ_cite_variant
+ \endgroup}
+
+\def\publ_cite_tags_variants_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitevariant{#1}%
+ \publ_cite_variant
+ \endgroup}
+
+\newconditional\btxcitecompress
+
+\def\publ_cite_variant
+ {\edef\p_compress{\btxcitevariantparameter\c!compress}%
+ % \ifx\p_compress\v!no
+ % \setfalse\btxcitecompress
+ % \else
+ % \settrue\btxcitecompress
+ % \fi
+ \begingroup
+ \settrue\c_publ_cite_write
+ \publ_cite_handle_variant_indeed[\currentbtxcitetag]}
+
+\unexpanded\def\publ_cite_handle_variant#1%
+ {\begingroup
+ \the\everysetupbtxciteplacement
+ \edef\currentbtxcitevariant{#1}%
+ \dosingleargument\publ_cite_handle_variant_indeed}
+
+\def\publ_cite_handle_variant_indeed[#1]%
+ {\usebtxcitevariantstyleandcolor\c!style\c!color
+ \letbtxcitevariantparameter\c!alternative\currentbtxcitevariant
+ \ctxcommand{btxhandlecite(%
+ "\currentbtxdataset",%
+ "#1",%
+ \iftrialtypesetting false\else true\fi,%
+ "\currentbtxcitevariant",%
+ "\btxcitevariantparameter\c!sorttype",%
+ "\btxcitevariantparameter\c!setups"%
+ )}%
+ \endgroup}
+
+\unexpanded\def\btxcitation
+ {\dontleavehmode
+ \begingroup
+ \dodoubleempty\publ_citation}
+
+\def\publ_citation[#1][#2]% could be made more efficient but not now
+ {\ifsecondargument
+ \publ_cite_tags_options_indeed{#1}[#2]%
+ \else
+ \publ_cite_tags_indeed{#1}%
+ \fi}
+
+\unexpanded\def\btxnocite
+ {\dosingleempty\publ_cite_no}
+
+\unexpanded\def\publ_cite_no[#1]%
+ {\iftrialtypesetting \else
+ \ctxcommand{btxhandlenocite("\currentbtxdataset","#1",true)}%
+ \fi}
+
+%D Compatibility:
+
+\let\cite \btxcite
+\let\citation \btxcitation
+\let\nocite \btxnocite
+\let\nocitation \btxnocite
+\let\usepublication\btxnocite
+
+%D Cite helpers:
+
+\unexpanded\def\btxcitevariant#1%
+ {\ctxcommand{btxcitevariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1")}}
+
+%D List helpers:
+
+\def\currentbtxindex{0}
+
+\unexpanded\def\btxlistvariant#1% was \currentbtxindex
+ {\begingroup
+ \edef\currentbtxlistvariant{#1}%
+ \btxlistvariantparameter\c!left
+ \ctxcommand{btxlistvariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1","\number\btxcounter")}% some can go
+ \btxlistvariantparameter\c!right
+ \endgroup}
+
+%D Whatever helpers:
+
+\unexpanded\def\btxsingularplural#1%
+ {\ctxcommand{btxsingularorplural("\currentbtxdataset","\currentbtxtag","#1")}}
+
+\let\btxsingularorplural\btxsingularplural
+
+%D Loading variants:
+
+\appendtoks
+ \loadbtxdefinitionfile[\btxrenderingparameter\c!alternative]
+\to \everysetupbtxrendering
+
+%D Defaults:
+
+\setupbtxrendering
+ [\c!dataset=\v!standard,
+ \c!method=\v!global,
+ \c!setups=btx:rendering:\btxrenderingparameter\c!alternative,
+ \c!alternative=apa,
+ \c!sorttype=,
+ \c!criterium=,
+ \c!refcommand=authoryears, % todo
+ \c!numbering=\v!yes,
+% \c!autohang=\v!no,
+ \c!width=\v!auto,
+ \c!distance=1.5\emwidth]
+
+\definebtxrendering
+ [\v!standard]
+
+\setupbtxcitevariant
+ [\c!interaction=\v!start,
+ \c!setups=btx:cite:\btxcitevariantparameter\c!alternative,
+ \c!alternative=num,
+ \c!andtext={ and },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ and },
+ \c!compress=\v!no,
+ \c!inbetween={ },
+ \c!left=,
+ \c!right=]
+
+\definebtxcitevariant
+ [author]
+ [%c!sorttype=,
+ \c!left={(},
+ \c!middle={, },
+ \c!right={)}]
+
+\definebtxcitevariant
+ [authoryear]
+ [\c!compress=\v!yes,
+ \c!inbetween={, },
+ \c!left={(},
+ \c!middle={, },
+ \c!right={)}]
+
+\definebtxcitevariant
+ [authoryears]
+ [authoryear]
+
+\definebtxcitevariant
+ [authornum]
+ [author]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [year]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtxcitevariant
+ [key]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [serial]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [page]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [short]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [type]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [doi]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [url]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [page]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween=\endash]
+
+\definebtxcitevariant
+ [num]
+ [\c!compress=\v!yes,
+ \c!inbetween={--},
+ \c!left={[},
+ \c!right={]}]
+
+\setupbtxlistvariant
+ [\c!namesep={, },
+ \c!lastnamesep={ and },
+ \c!finalnamesep={ and },
+ \c!firstnamesep={ },
+ \c!juniorsep={ },
+ \c!vonsep={ },
+ \c!surnamesep={, },
+ \c!surnameinitialsep={, },
+ \c!surnamefirstnamesep={, },
+ \c!etallimit=5,
+ \c!etaldisplay=5,
+ \c!etaltext={ et al.},
+ \c!monthconversion=\v!number,
+ \c!authorconversion=\v!normal]
+
+\definebtxlistvariant
+ [author]
+ [author=invertedshort] % we could also do this in the apa style itself
+
+\definebtxlistvariant
+ [editor]
+ [author]
+
+\definebtxlistvariant
+ [artauthor]
+ [author]
+
+% Do we want these in the format? Loading them delayed is somewhat messy.
+
+\loadbtxdefinitionfile[apa]
+\loadbtxdefinitionfile[cite]
+\loadbtxdefinitionfile[commands]
+\loadbtxdefinitionfile[definitions]
+
+\protect
diff --git a/Master/texmf-dist/tex/context/base/publ-old.mkiv b/Master/texmf-dist/tex/context/base/publ-old.mkiv
new file mode 100644
index 00000000000..f616428e6df
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-old.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=publ-old,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Old Fashioned \BIBTEX,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+% we could use runtime commands instead
+
+\unexpanded\def\setupbibtex {\usemodule[oldbibtex]\setupbibtex}
+\unexpanded\def\setuppublications {\usemodule[oldbibtex]\setuppublications}
+\unexpanded\def\setuppublicationlist{\usemodule[oldbibtex]\setuppublicationlist}
+
+\protect
diff --git a/Master/texmf-dist/tex/context/base/publ-oth.lua b/Master/texmf-dist/tex/context/base/publ-oth.lua
new file mode 100644
index 00000000000..14da19f9cb4
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-oth.lua
@@ -0,0 +1,146 @@
+if not modules then modules = { } end modules ['publ-oth'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, S, C, Ct, Cf, Cg, Cmt, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.Cmt, lpeg.Carg
+local lpegmatch = lpeg.match
+
+local p_endofline = lpeg.patterns.newline
+
+local loaders = publications.loaders
+local getindex = publications.getindex
+
+local function addfield(t,k,v,fields)
+ k = fields[k]
+ if k then
+ local tk = t[k]
+ if tk then
+ t[k] = tk .. " and " .. v
+ else
+ t[k] = v
+ end
+ end
+ return t
+end
+
+local function checkfield(_,_,t,categories,all)
+ local tag = t.tag
+ if tag then
+ local category = t.category
+ t.tag = nil
+ t.category = categories[category] or category
+ all[tag] = t
+ end
+ return true
+end
+
+-- endnotes --
+
+local fields = {
+ ["@"] = "tag",
+ ["0"] = "category",
+ ["A"] = "author",
+ ["E"] = "editor",
+ ["T"] = "title",
+ ["D"] = "year",
+ ["I"] = "publisher",
+}
+
+local categories = {
+ ["Journal Article"] = "article",
+}
+
+local entry = P("%") * Cg(C(1) * (S(" \t")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+function publications.endnotes_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.endnote(dataset,filename)
+ -- we could combine the next into checkfield but let's not create too messy code
+ loaders.lua(dataset,publications.endnotes_to_btx(io.loaddata(filename) or ""))
+end
+
+-- refman --
+
+local entry = Cg(C((1-lpeg.S(" \t")-p_endofline)^1) * (S(" \t-")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+local fields = {
+ ["SN"] = "tag",
+ ["TY"] = "category",
+ ["A1"] = "author",
+ ["E1"] = "editor",
+ ["T1"] = "title",
+ ["Y1"] = "year",
+ ["PB"] = "publisher",
+}
+
+local categories = {
+ ["JOUR"] = "article",
+}
+
+function publications.refman_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.refman(dataset,filename)
+ -- we could combine the next into checkfield but let's not create too messy code
+ loaders.lua(dataset,publications.refman_to_btx(io.loaddata(filename) or ""))
+end
+
+-- test --
+
+-- local endnote = [[
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677776
+-- %D 1994
+-- %I IEEE Computer Society
+--
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677775
+-- %D 1994
+-- %I IEEE Computer Society
+-- ]]
+--
+-- local refman = [[
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677776
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+--
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677775
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+-- ]]
+--
+-- inspect(publications.endnotes_to_btx(endnote))
+-- inspect(publications.refman_to_btx(refman))
diff --git a/Master/texmf-dist/tex/context/base/publ-tra.lua b/Master/texmf-dist/tex/context/base/publ-tra.lua
new file mode 100644
index 00000000000..98c81d800dd
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-tra.lua
@@ -0,0 +1,296 @@
+if not modules then modules = { } end modules ['publ-tra'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local sortedhash = table.sortedhash
+
+local tracers = { }
+publications.tracers = tracers
+local datasets = publications.datasets
+
+local context = context
+local NC, NR = context.NC, context.NR
+local bold = context.bold
+local darkgreen, darkred, darkblue = context.darkgreen, context.darkred, context.darkblue
+
+local fields = table.sorted {
+ "abstract",
+ "address",
+ "annotate",
+ "author",
+ "booktitle",
+ "chapter",
+ "comment",
+ "country",
+ "doi",
+ "edition",
+ "editor",
+ "eprint",
+ "howpublished",
+ "institution",
+ "isbn",
+ "issn",
+ "journal",
+ "key",
+ "keyword",
+ "keywords",
+ "language",
+ "lastchecked",
+ "month",
+ "names",
+ "note",
+ "notes",
+ "number",
+ "organization",
+ "pages",
+ "publisher",
+ "school",
+ "series",
+ "size",
+ "title",
+ "type",
+ "url",
+ "volume",
+ "year",
+ "nationality",
+ "assignee",
+ "bibnumber",
+ "day",
+ "dayfiled",
+ "monthfiled",
+ "yearfiled",
+ "revision",
+}
+
+local citevariants = table.sorted {
+ "author",
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "year",
+ "short",
+ "serial",
+ "key",
+ "doi",
+ "url",
+ "type",
+ "page",
+ "none",
+ "num",
+}
+
+local listvariants = table.sorted {
+ "author",
+ "editor",
+ "artauthor",
+}
+
+-- local categories = table.sorted {
+-- "article",
+-- "book",
+-- "booklet",
+-- "conference",
+-- "inbook",
+-- "incollection",
+-- "inproceedings",
+-- "manual",
+-- "mastersthesis",
+-- "misc",
+-- "phdthesis",
+-- "proceedings",
+-- "techreport",
+-- "unpublished",
+-- }
+
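+-- in the required/optional lists below a nested table groups alternatives: any one
+-- of its fields is considered sufficient (for instance author or editor for a book)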
+local categories = {
+ article = {
+ required = { "author", "title", "journal", "year" },
+ optional = { "volume", "number", "pages", "month", "note" },
+ },
+ book = {
+ required = { { "author", "editor" }, "title", "publisher", "year" },
+ optional = { { "volume", "number" }, "series", "address", "edition", "month","note" },
+ },
+ booklet = {
+ required = { "title" },
+ optional = { "author", "howpublished", "address", "month", "year", "note" },
+ },
+ inbook = {
+ required = { { "author", "editor" }, "title", { "chapter", "pages" }, "publisher","year" },
+ optional = { { "volume", "number" }, "series", "type", "address", "edition", "month", "note" },
+ },
+ incollection = {
+ required = { "author", "title", "booktitle", "publisher", "year" },
+ optional = { "editor", { "volume", "number" }, "series", "type", "chapter", "pages", "address", "edition", "month", "note" },
+ },
+ inproceedings = {
+ required = { "author", "title", "booktitle", "year" },
+ optional = { "editor", { "volume", "number" }, "series", "pages", "address", "month","organization", "publisher", "note" },
+ },
+ manual = {
+ required = { "title" },
+ optional = { "author", "organization", "address", "edition", "month", "year", "note" },
+ },
+ mastersthesis = {
+ required = { "author", "title", "school", "year" },
+ optional = { "type", "address", "month", "note" },
+ },
+ misc = {
+ required = { "author", "title", "howpublished", "month", "year", "note" },
+ optional = { "author", "title", "howpublished", "month", "year", "note" },
+ },
+ phdthesis = {
+ required = { "author", "title", "school", "year" },
+ optional = { "type", "address", "month", "note" },
+ },
+ proceedings = {
+ required = { "title", "year" },
+ optional = { "editor", { "volume", "number" }, "series", "address", "month", "organization", "publisher", "note" },
+ },
+ techreport = {
+ required = { "author", "title", "institution", "year" },
+ optional = { "type", "number", "address", "month", "note" },
+ },
+ unpublished = {
+ required = { "author", "title", "note" },
+ optional = { "month", "year" },
+ },
+}
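+
+-- Reading aid (a sketch, not used by the checker below): an inner table in a required or
+-- optional list means that any one of its fields satisfies that slot, for instance:
+--
+-- local spec = categories.book
+-- -- spec.required[1] is { "author", "editor" } : either field will do
+-- -- spec.required[2] is "title"                : this field itself is mandatory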
+
+
+publications.tracers.fields = fields
+publications.tracers.categories = categories
+publications.tracers.citevariants = citevariants
+publications.tracers.listvariants = listvariants
+-- -- --
+
+function tracers.showdatasetfields(dataset)
+ local luadata = datasets[dataset].luadata
+ if next(luadata) then
+ context.starttabulate { "|lT|lT|pT|" }
+ NC() bold("tag")
+ NC() bold("category")
+ NC() bold("fields")
+ NC() NR() context.FL() -- HL()
+ for k, v in sortedhash(luadata) do
+ NC() context(k)
+ NC() context(v.category)
+ NC()
+ for k, v in sortedhash(v) do
+ if k ~= "details" and k ~= "tag" and k ~= "category" then
+ context("%s ",k)
+ end
+ end
+ NC() NR()
+ end
+ context.stoptabulate()
+ end
+end
+
+function tracers.showdatasetcompleteness(dataset)
+
+ dataset = datasets[dataset]
+
+ local preamble = { "|lBTw(10em)|p|" }
+
+ local function required(key,value,indirect)
+ NC() darkgreen(key)
+ NC() if indirect then
+ darkblue(value)
+ elseif value then
+ context(value)
+ else
+ darkred("\\tttf [missing]")
+ end
+ NC() NR()
+ end
+
+ local function optional(key,value,indirect)
+ NC() context(key)
+ NC() if indirect then
+ darkblue(value)
+ elseif value then
+ context(value)
+ end
+ NC() NR()
+ end
+
+ local function identified(tag,crossref)
+ NC() context("tag")
+ NC() if crossref then
+ context("\\tttf %s\\hfill\\darkblue => %s",tag,crossref)
+ else
+ context("\\tttf %s",tag)
+ end
+ NC() NR()
+ end
+
+ local luadata = dataset.luadata -- dataset has already been resolved above
+
+ if next(luadata) then
+ for tag, entry in table.sortedhash(luadata) do
+ local category = entry.category
+ local fields = categories[category]
+ if fields then
+ context.starttabulate(preamble)
+ identified(tag,entry.crossref)
+ context.HL()
+ local requiredfields = fields.required
+ local optionalfields = fields.optional
+ for i=1,#requiredfields do
+ local r = requiredfields[i]
+ if type(r) == "table" then
+ local okay = false -- becomes true when one of the alternative fields is present
+ for i=1,#r do
+ local ri = r[i]
+ if rawget(entry,ri) then
+ required(ri,entry[ri])
+ okay = true
+ elseif entry[ri] then
+ required(ri,entry[ri],true)
+ okay = true
+ end
+ end
+ if not okay then
+ required(table.concat(r,"\\letterbar "))
+ end
+ elseif rawget(entry,r) then
+ required(r,entry[r])
+ elseif entry[r] then
+ required(r,entry[r],true)
+ else
+ required(r)
+ end
+ end
+ for i=1,#optionalfields do
+ local o = optionalfields[i]
+ if type(o) == "table" then
+ for i=1,#o do
+ local oi = o[i]
+ if rawget(entry,oi) then
+ optional(oi,entry[oi])
+ elseif entry[oi] then
+ optional(oi,entry[oi],true)
+ end
+ end
+ elseif rawget(entry,o) then
+ optional(o,entry[o])
+ elseif entry[o] then
+ optional(o,entry[o],true)
+ end
+ end
+ context.stoptabulate()
+ else
+ -- error
+ end
+ end
+ end
+
+end
+
+commands.showbtxdatasetfields = tracers.showdatasetfields
+commands.showbtxdatasetcompleteness = tracers.showdatasetcompleteness
diff --git a/Master/texmf-dist/tex/context/base/publ-tra.mkiv b/Master/texmf-dist/tex/context/base/publ-tra.mkiv
new file mode 100644
index 00000000000..49fb6d96275
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-tra.mkiv
@@ -0,0 +1,35 @@
+%D \module
+%D [ file=publ-tra,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Tracing,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% todo: make this a runtime module
+% todo: use the module interface
+
+\writestatus{loading}{ConTeXt Publication Support / Tracing}
+
+\registerctxluafile{publ-tra}{1.001}
+
+\unprotect
+
+\unexpanded\def\showbtxdatasetfields
+ {\dosingleempty\publ_dataset_show_fields}
+
+\def\publ_dataset_show_fields[#1]%
+ {\ctxcommand{showbtxdatasetfields("\iffirstargument#1\else\currentbtxdataset\fi")}}
+
+\unexpanded\def\showbtxdatasetcompleteness
+ {\dosingleempty\publ_dataset_show_completeness}
+
+\def\publ_dataset_show_completeness[#1]%
+ {\ctxcommand{showbtxdatasetcompleteness("\iffirstargument#1\else\currentbtxdataset\fi")}}
+
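+% A usage sketch (the names are made up; the dataset is assumed to have been loaded
+% elsewhere, for instance with \usebtxdataset):
+%
+% \usebtxdataset[example][mybibfile.bib]
+%
+% \showbtxdatasetfields      [example]
+% \showbtxdatasetcompleteness[example]
+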
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/publ-usr.lua b/Master/texmf-dist/tex/context/base/publ-usr.lua
new file mode 100644
index 00000000000..6bb93ebeef5
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-usr.lua
@@ -0,0 +1,91 @@
+if not modules then modules = { } end modules ['publ-usr'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- local chardata = characters.data
+
+-- local str = [[
+-- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
+-- \artauthor[]{Alexander}[A.]{}{Berdnikov}
+-- \artauthor[]{Hans}[H.]{}{Hagen}
+-- \artauthor[]{Taco}[T.]{}{Hoekwater}
+-- \artauthor[]{Bogus{\l}aw}[B.]{}{Jackowski}
+-- \pubyear{2000}
+-- \arttitle{{Even more MetaFun with \MP: A request for permission}}
+-- \journal{TUGboat}
+-- \issn{0896-3207}
+-- \volume{21}
+-- \issue{2}
+-- \pages{129--130}
+-- \month{6}
+-- \stoppublication
+-- ]]
+
+local remapped = {
+ artauthor = "author",
+ arttitle = "title",
+}
+
+local P, Cs, R, Cc, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.Carg
+
+local function register(target,key,a,b,c,d,e)
+ key = remapped[key] or key
+ if b and d and e then
+ local s = nil
+ if b ~= "" and b then
+ s = s and s .. " " .. b or b
+ end
+ if d ~= "" and d then
+ s = s and s .. " " .. d or d
+ end
+ if e ~= "" and e then
+ s = s and s .. " " .. e or e
+ end
+ if a ~= "" and a then
+ s = s and s .. " " .. a or a
+ end
+ local value = target[key]
+ if s then
+ if value then
+ target[key] = value .. " and " .. s
+ else
+ target[key] = s
+ end
+ else
+ if not value then
+ target[key] = s
+ end
+ end
+ else
+ target[key] = b
+ end
+end
+
+local leftbrace = P("{")
+local rightbrace = P("}")
+local leftbracket = P("[")
+local rightbracket = P("]")
+
+local key = P("\\") * Cs(R("az","AZ")^1) * lpeg.patterns.space^0
+local mandate = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace + Cc(false)
+local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket + Cc(false)
+local value = optional^-1 * mandate^-1 * optional^-1 * mandate^-2
+
+local pattern = ((Carg(1) * key * value) / register + P(1))^0
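+
+-- Roughly, a line like "\artauthor[]{Alexander}[A.]{}{Berdnikov}" from the sample above
+-- becomes register(data,"artauthor","","Alexander","A.","","Berdnikov"), which appends
+-- "Alexander Berdnikov" to data.author (the initials capture is currently not used).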
+
+function publications.addtexentry(dataset,settings,content)
+ settings = utilities.parsers.settings_to_hash(settings)
+ local data = {
+ tag = settings.tag or settings.k or "no tag",
+ category = settings.category or settings.t or "article",
+ }
+ lpeg.match(pattern,content,1,data) -- can set tag too
+ dataset.userdata[data.tag] = data
+ dataset.luadata[data.tag] = data
+ publications.markasupdated(dataset)
+ return data
+end
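+
+-- A minimal call sketch (tag, settings and content are made-up values):
+--
+-- local dataset = publications.datasets["standard"]
+-- publications.addtexentry(dataset,"k=sometag,t=article",
+--     [[\artauthor[]{Some}[S.]{}{Author}\arttitle{Some Title}\pubyear{2000}]])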
diff --git a/Master/texmf-dist/tex/context/base/publ-usr.mkiv b/Master/texmf-dist/tex/context/base/publ-usr.mkiv
new file mode 100644
index 00000000000..cb078f424d5
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-usr.mkiv
@@ -0,0 +1,2 @@
+% todo
+
diff --git a/Master/texmf-dist/tex/context/base/publ-xml.mkiv b/Master/texmf-dist/tex/context/base/publ-xml.mkiv
new file mode 100644
index 00000000000..007f9bb27d2
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/publ-xml.mkiv
@@ -0,0 +1,114 @@
+%D \module
+%D [ file=publ-xml,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=XML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Publication Support / XML}
+
+\unprotect
+
+\unexpanded\def\convertbtxdatasettoxml
+ {\dosingleempty\publ_convert_to_xml}
+
+\def\publ_convert_to_xml[#1]%
+ {\ctxcommand{convertbtxdatasettoxml("\iffirstargument#1\else\v!standard\fi",true)}} % or current when not empty
+
+% \startxmlsetups btx:initialize
+% \xmlregistereddocumentsetups{#1}{}
+% \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+% \xmlmain{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:initialize
+ \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+ \xmlmain{#1}
+\stopxmlsetups
+
+% \startxmlsetups btx:entry
+% \xmlflush{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:field
+ \xmlflushcontext{#1}
+\stopxmlsetups
+
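+% A usage sketch ("standard" is just the default dataset name used above):
+%
+% \convertbtxdatasettoxml[standard]
+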
+\protect \endinput
+
+% \startxmlsetups bibtex:entry:getkeys
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlatt{#1}{tag}}
+% \stopxmlsetups
+
+% \startbuffer
+% \startxmlsetups xml:bibtex:sorter
+% \xmlresetsorter{bibtex}
+% % \xmlfilter{#1}{entry/command(bibtex:entry:getkeys)}
+% \xmlfilter{#1}{
+% bibtex
+% /entry[@category='article']
+% /field[@name='author' and find(text(),'Knuth')]
+% /../command(bibtex:entry:getkeys)}
+% \xmlsortentries{bibtex}
+% \xmlflushsorter{bibtex}{bibtex:entry:flush}
+% \stopxmlsetups
+% \stopbuffer
+
+% \bgroup
+% \setups[bibtex-commands]
+% \getbuffer
+% \egroup
+
+% \startxmlsetups bibtex:entry:flush
+% \xmlfilter{#1}{/field[@name='author']/context()} / %
+% \xmlfilter{#1}{/field[@name='year' ]/context()} / %
+% \xmlatt{#1}{tag}\par
+% \stopxmlsetups
+
+% \startpacked
+% \getbuffer
+% \stoppacked
+
+
+% \unexpanded\def\btx_xml_list_handle_entry
+% {\begingroup
+% \ignorespaces
+% \xmlfilter{btx:\currentbtxrendering}{/bibtex/entry[@tag='\currentbtxtag']/command(btx:format)}%
+% \removeunwantedspaces
+% \endgroup}
+
+% \startxmlsetups btx:format
+% \btxlistparameter\c!before\relax % prevents lookahead
+% \edef\currentbibxmlnode {#1}
+% \edef\currentbibxmltag {\xmlatt{#1}{tag}}
+% \edef\currentbtxcategory{\xmlatt{#1}{category}}
+% \ignorespaces
+% \xmlcommand{#1}{.}{btx:\currentbtxformat:\currentbibxmlcategory}
+% \removeunwantedspaces
+% \btxlistparameter\c!after\relax % prevents lookahead
+% \stopxmlsetups
+
+% \startxmlsetups btx:list
+% \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)}
+% \stopxmlsetups
+
+% \startxmlsetups btx:btx
+% \xmlfilter{#1}{/entry/command(btx:format)}
+% \stopxmlsetups
+
+% \unexpanded\def\btx_xml_doifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_flush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_setup {\xmlsetup \currentbibxmlnode} % {#1}
+% \unexpanded\def\btx_xml_todo #1{[#1]}
+
+% \xmlfilter{#1}{/field[@name='\currentbtxfield']/btxconcat('\currentbtxfield')}
diff --git a/Master/texmf-dist/tex/context/base/s-abr-01.tex b/Master/texmf-dist/tex/context/base/s-abr-01.tex
index 026f2ea09d8..733eebf7b95 100644
--- a/Master/texmf-dist/tex/context/base/s-abr-01.tex
+++ b/Master/texmf-dist/tex/context/base/s-abr-01.tex
@@ -34,6 +34,7 @@
%logo [FGA] {fga}
%logo [FGBBS] {fgbbs}
\logo [ACROBAT] {Acro\-bat}
+\logo [APA] {apa}
\logo [AFM] {afm}
\logo [API] {api}
\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
@@ -47,6 +48,7 @@
\logo [ASCIITEX] {ascii\TeX}
\logo [BACHOTEX] {Bacho\TeX}
\logo [BIBTEX] {bib\TeX}
+\logo [MLBIBTEX] {MLbib\TeX}
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
@@ -232,11 +234,13 @@
\logo [SQL] {sql}
\logo [SSD] {ssd}
\logo [SVG] {svg}
+\logo [STIX] {Stix}
\logo [SWIG] {swig}
\logo [SWIGLIB] {SwigLib}
\logo [TABLE] {\TaBlE}
\logo [TCPIP] {tcp/ip}
\logo [TDS] {tds} % no sc te
+\logo [TEI] {tei} % no sc te
\logo [TETEX] {te\TeX} % no sc te
\logo [TEX] {\TeX}
\logo [TEXADRES] {\TeX adress}
@@ -299,6 +303,7 @@
\logo [XFDF] {xfdf}
\logo [XHTML] {xhtml}
\logo [XINDY] {Xindy}
+\logo [XITS] {Xits}
\logo [XML] {xml}
\logo [XPATH] {xpath}
\logo [XMLTOOLS] {xmltools}
diff --git a/Master/texmf-dist/tex/context/base/s-abr-04.tex b/Master/texmf-dist/tex/context/base/s-abr-04.tex
index dcd93c6f151..23940b52629 100644
--- a/Master/texmf-dist/tex/context/base/s-abr-04.tex
+++ b/Master/texmf-dist/tex/context/base/s-abr-04.tex
@@ -1,8 +1,8 @@
%D \module
-%D [ file=s-abr-01,
+%D [ file=s-abr-04,
%D version=1996.01.01,
%D title=\CONTEXT\ Style File,
-%D subtitle=General Abbreviations 1,
+%D subtitle=General Abbreviations 2,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -11,314 +11,14 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\unprotect
-
-% \setupsorting[logo][\c!style=\v!capital]
-
-% \setupcapitals[\c!title=\v!no]
-
-\protect
-
-\logo [MKI] {MkI}
-\logo [MKII] {MkII}
-\logo [MKIII] {MkIII}
-\logo [MKIV] {MkIV}
+\usemodule[abr-01]
-%logo [FGA] {fga}
-%logo [FGBBS] {fgbbs}
-\logo [ACROBAT] {Acro\-bat}
-\logo [AFM] {afm}
-\logo [API] {api}
-\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
-\logo [ALGOL] {ALGOL}
-\logo [AMS] {ams}
-\logo [AMSLATEX] {AmS\LATEX}
-\logo [AMSTEX] {AmS\TeX}
-\logo [ANSI] {ansi}
-\logo [ARABTEX] {Arab\TeX}
-\logo [ASCII] {ascii}
-\logo [ASCIITEX] {ascii\TeX}
-\logo [BACHOTEX] {Bacho\TeX}
-\logo [BIBTEX] {bib\TeX}
-\logo [BLUESKY] {BlueSky}
-\logo [BMP] {bmp}
-\logo [BSD] {bsd}
-\logo [CCODE] {c}
-\logo [CALCMATH] {CalcMath}
-\logo [CD] {cd}
-\logo [CLD] {cld}
-\logo [CPU] {cpu}
-\logo [CDROM] {cdrom}
-\logo [CID] {cid}
-\logo [CJK] {cjk}
-\logo [CMR] {cmr}
-\logo [CMYK] {cmyk}
-\logo [CODHOST] {CodHost}
-\logo [CONTEXT] {Con{\TeX}t}
-\logo [CSS] {css}
-\logo [CTAN] {ctan}
-\logo [CTXTOOLS] {ctxtools}
-\logo [CWEB] {cweb}
-\logo [DANTE] {Dante}
-\logo [DISTILLER] {distiller}
-\logo [DRATEX] {Dra\TeX}
-\logo [DSC] {dsc}
-\logo [DTD] {dtd}
-\logo [DTK] {dtk}
-\logo [DTP] {dtp}
-\logo [DVD] {dvd}
-\logo [DVI] {dvi}
-\logo [DVIPDFM] {dvipdfm}
-\logo [DVIPDFMX] {dvipdfmx}
-\logo [DVIPOS] {dvipos}
-\logo [DVIPS] {dvips}
-\logo [DVIPSONE] {dvipsone}
-\logo [DVISCR] {dviscr}
-\logo [DVIWINDO] {dviwindo}
-\logo [EC] {ec}
-\logo [EIFFEL] {Eiffel}
-\logo [EMACS] {emacs}
-\logo [EMTEX] {em\TeX}
-\logo [ENCODING] {enc}
-\logo [ENCTEX] {enc\TeX}
-\logo [EPS] {eps}
-\logo [ETEX] {\eTeX}
-\logo [EUROBACHOTEX] {EuroBacho\TeX}
-\logo [EUROMATH] {EuroMath}
-\logo [EUROTEX] {Euro\TeX}
-\logo [EXAMPLE] {eXaMpLe}
-\logo [EXAMPLED] {exampled}
-\logo [EXAMPLEQ] {exampleq}
-\logo [EXAMPLER] {exampler}
-\logo [EXAMPLET] {examplet}
-\logo [EXAMPLEX] {examplex}
-\logo [EXIMPLE] {eXiMpLe}
-\logo [FAQ] {faq}
-\logo [FDF] {fdf}
-\logo [FONTFORGE] {FontForge}
-\logo [FOXET] {foXet}
-\logo [FPTEX] {fp\TeX}
-\logo [FREEBSD] {FreeBSD}
-\logo [FTP] {ftp}
-\logo [GHOSTSCRIPT]{Ghost\-script}
-\logo [GHOSTVIEW] {Ghost\-view}
-\logo [GIF] {gif}
-\logo [GNU] {gnu}
-\logo [GNUPLOT] {gnuplot}
-\logo [GS] {Ghost\-Script}
-\logo [GUST] {Gust}
-\logo [GWTEX] {gw\TeX}
-\logo [HSB] {hsb}
-\logo [HTML] {html}
-\logo [HTTP] {http}
-\logo [HZ] {hz}
-\logo [IBM] {ibm}
-\logo [IMAGEMAGICK]{ImageMagick}
-\logo [INITEX] {ini\TeX}
-\logo [INRSTEX] {inrs\TeX}
-\logo [IO] {io}
-\logo [IRCNET] {IRCnet}
-\logo [ISO] {iso}
-\logo [JAVA] {Java}
-\logo [JAVASCRIPT] {Java\-Script}
-\logo [JPEG] {jpeg}
-\logo [JPG] {jpg}
-\logo [JBIG] {jbig}
-\logo [KPATHSEA] {kpathsea}
-\logo [KPSE] {kpse}
-\logo [KPSEWHICH] {kpsewhich}
-\logo [MKTEXLSR] {mktexlsr}
-\logo [LAMSTEX] {LamS\TeX}
-\logo [LATEX] {La\TeX}
-\logo [LATEXTE] {La\TeX2e}
-\logo [LATEXTN] {La\TeX2.09}
-\logo [LINUX] {linux}
-\logo [LISP] {Lisp}
-\logo [LPEG] {lpeg}
-\logo [LUA] {Lua}
-\logo [LUAJIT] {LuaJIT}
-\logo [LUATEX] {Lua\TeX}
-\logo [LUAJITTEX] {Luajit\TeX}
-\logo [LUATOOLS] {luatools}
-\logo [MACOSX] {MacOSX}
-\logo [MACROTEX] {Macro\TeX}
-\logo [MAKEMPY] {MakeMPY}
-\logo [MAPPING] {map}
-\logo [MAPS] {Maps}
-\logo [MATHML] {MathML}
-\logo [METAFONT] {MetaFont}
-\logo [METAPOST] {MetaPost}
-\logo [METATEX] {Meta\TeX}
-\logo [MIKTEX] {Mik\TeX}
-\logo [MLTEX] {ml\TeX}
-\logo [METATYPE] {MetaType1}
-\logo [MODULA] {Modula}
-\logo [MOV] {mov}
-\logo [MPS] {mps}
-\logo [MPTOPDF] {mptopdf}
-\logo [MPLIB] {MPlib}
-\logo [MSDOS] {msdos}
-\logo [MSWINDOWS] {MS~Windows}
-\logo [MTXRUN] {mtxrun}
-\logo [MTXTOOLS] {mtxtools}
-\logo [NETPBM] {NetPBM}
-\logo [NTG] {ntg}
-\logo [NTS] {nts}
-\logo [OFM] {ofm}
-\logo [OMEGA] {Omega}
-\logo [OPENMATH] {OpenMath}
-\logo [OPENTYPE] {OpenType}
-\logo [OPI] {opi}
-\logo [OTF] {otf}
-\logo [OTP] {otp}
-\logo [OVF] {ovf}
-\logo [PASCAL] {Pascal}
-\logo [PCTEX] {pc\TeX}
-\logo [PDF] {pdf}
-\logo [PDFETEX] {pdfe\TeX}
-\logo [PDFTEX] {pdf\TeX}
-\logo [PDFTOOLS] {pdftools}
-\logo [PDFTOPS] {pdftops}
-\logo [PERL] {Perl}
-\logo [PERLTK] {Perl/Tk}
-\logo [PICTEX] {\PiCTeX}
-\logo [PK] {pk}
-\logo [PLAIN] {Plain}
-\logo [PNG] {png}
-\logo [POSIX] {posix}
-\logo [POSTSCRIPT] {Post\-Script}
-\logo [PPCHTEX] {\PPCHTeX}
-\logo [PRAGMA] {Pragma ADE}
-\logo [PRESS] {press}
-\logo [PRIFIL] {prifil}
-\logo [PS] {Post\-Script}
-\logo [PSCHECK] {pscheck}
-\logo [PSTOEDIT] {pstoedit}
-\logo [PSTOPAGE] {pstopage}
-\logo [PSTOPDF] {pstopdf}
-\logo [PSTRICKS] {pstricks}
-\logo [RAM] {ram}
-\logo [READER] {Acro\-bat Reader}
-\logo [RELAXNG] {Relax\kern.125emNG}
-\logo [RGB] {rgb}
-\logo [RLXTOOLS] {rlxtools}
-\logo [RUBY] {Ruby}
-\logo [SCITE] {SciTE}
-\logo [SGML] {sgml}
-\logo [SI] {si}
-\logo [SQL] {sql}
-\logo [TABLE] {\TaBlE}
-\logo [TCPIP] {tcp/ip}
-\logo [TDS] {tds} % no sc te
-\logo [TETEX] {te\TeX} % no sc te
-\logo [TEX] {\TeX}
-\logo [TEXADRES] {\TeX adress}
-\logo [TEXBASE] {\TeX base}
-\logo [TEXEDIT] {\TeX edit}
-\logo [TEXEXEC] {\TeX exec}
-\logo [TEXFONT] {\TeX font}
-\logo [TEXFORM] {\TeX form}
-\logo [TEXLIVE] {\TeX Live}
-\logo [TEXLUA] {\TeX Lua}
-\logo [TEXMF] {texmf}
-\logo [TEXMFSTART] {texmfstart}
-\logo [TEXNL] {tex-nl}
-\logo [TEXSHOW] {\TeX show}
-\logo [TEXSPELL] {\TeX spell}
-\logo [TEXGYRE] {\TeX\ Gyre}
-\logo [TEXSYNC] {texsync}
-\logo [TEXTMATE] {TextMate}
-\logo [TEXTOOLS] {\TeX tools}
-\logo [TEXUTIL] {\TeX util}
-\logo [TEXWORK] {\TeX work}
-\logo [TEXXET] {\TeX\XeT} \def\XeT{XeT}
-\logo [TFM] {tfm}
-\logo [TIF] {tif}
-\logo [TIFF] {tiff}
-\logo [TIFFINFO] {tiffinfo}
-\logo [TIFFTAGS] {tifftags}
-\logo [TMFTOOLS] {tmftools}
-\logo [TPIC] {tpic}
-\logo [TPM] {tpm}
-\logo [TRUETYPE] {TrueType}
-\logo [TTF] {ttf}
-\logo [TUG] {tug}
-\logo [TUGBOAT] {Tug\-Boat}
-\logo [TUGNEWS] {Tug\-News}
-\logo [TYPEONE] {Type1}
-\logo [UCS] {ucs}
-\logo [UNICODE] {Uni\-code}
-\logo [UNIX] {Unix}
-\logo [URI] {uri}
-\logo [URL] {url}
-\logo [USA] {usa}
-\logo [USENET] {usenet}
-\logo [UTF] {utf}
-\logo [UTF] {utf}
-\logo [VF] {vf}
-\logo [WDT] {wdt}
-\logo [WEB] {web}
-\logo [WEBC] {web2c}
-\logo [WIKI] {Wiki}
-\logo [WINDOWS] {Windows}
-\logo [WINNT] {WinNT}
-\logo [WINNX] {Win9x}
-\logo [WWW] {www}
-\logo [WYSIWYG] {wysiwyg}
-\logo [XDVI] {Xdvi}
-\logo [XETEX] {\XeTeX}
-\logo [XFDF] {xfdf}
-\logo [XHTML] {xhtml}
-\logo [XINDY] {Xindy}
-\logo [XML] {xml}
-\logo [XPATH] {xpath}
-\logo [XMLTOOLS] {xmltools}
-\logo [XPDFETEX] {xpdfe\TeX}
-\logo [XSL] {xsl}
-\logo [XSLFO] {xsl-fo}
-\logo [XSLT] {xslt}
-\logo [XSLTPROC] {xsltproc}
-\logo [XYPIC] {XYPIC} % wrong logo
-\logo [YandY] {y\&y}
-\logo [ZIP] {zip}
-
-\def\METAFUN {\MetaFun}
-
-\logo [METAFUN] {\MetaFun}
-
-\def\SystemSpecialA#1{$\langle\it#1\rangle$}
-\def\SystemSpecialB#1{{\tttf<#1>}}
-
-\def\CATCODE {\SystemSpecialA{catcode}}
-\def\CATCODES {\SystemSpecialA{catcodes}}
-\def\DIMENSION {\SystemSpecialA{dimension}}
-\def\DIMENSIONS {\SystemSpecialA{dimensions}}
-\def\COUNTER {\SystemSpecialA{counter}}
-\def\COUNTERS {\SystemSpecialA{counters}}
-\def\HBOX {\SystemSpecialA{hbox}}
-\def\HBOXES {\SystemSpecialA{hboxes}}
-\def\VBOX {\SystemSpecialA{vbox}}
-\def\VBOXES {\SystemSpecialA{vboxes}}
-\def\BOX {\SystemSpecialA{box}}
-\def\BOXES {\SystemSpecialA{boxes}}
-\def\TOKENLIST {\SystemSpecialA{token list}}
-\def\TOKENLISTS {\SystemSpecialA{token lists}}
-\def\NEWLINE {\SystemSpecialA{newline}}
-\def\SKIP {\SystemSpecialA{skip}}
-\def\SKIPS {\SystemSpecialA{skips}}
-\def\MUSKIP {\SystemSpecialA{muskip}}
-\def\MUSKIPS {\SystemSpecialA{muskips}}
-\def\MARK {\SystemSpecialA{mark}}
-\def\MARKS {\SystemSpecialA{marks}}
+\unprotect
-\def\SPACE {\SystemSpecialB{space}}
-\def\EOF {\SystemSpecialB{eof}}
-\def\TAB {\SystemSpecialB{tab}}
-\def\NEWPAGE {\SystemSpecialB{newpage}}
-\def\NEWLINE {\SystemSpecialB{newline}}
+% \definealternativestyle [\v!mixed] [\font_style_pseudoMixedCapped] [\font_style_pseudoMixedCapped]
-\def\THANH {H\`an Th\^e\llap{\raise 0.5ex\hbox{\'{}}} Th\`anh}
+\setupsorting[logo][\c!style=\font_style_pseudoMixedCapped]
-\def\THANH {H\`an Th\ecircumflexacute\ Th\`anh}
+% \setupcapitals[\c!title=\v!no]
-\endinput
+\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-features.mkiv b/Master/texmf-dist/tex/context/base/s-fonts-features.mkiv
index 8982f734747..b81b53a7176 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-features.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fonts-features.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\startmodule[s-fonts-features]
+\startmodule[fonts-features]
\registerctxluafile{s-fonts-features}{}
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-goodies.mkiv b/Master/texmf-dist/tex/context/base/s-fonts-goodies.mkiv
index f070818251b..e596507afa9 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-goodies.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fonts-goodies.mkiv
@@ -13,7 +13,7 @@
%D More tables will follow here as we have many more goodies by now.
-\startmodule[s-fonts-goodies]
+\startmodule[fonts-goodies]
\registerctxluafile{s-fonts-goodies}{}
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-missing.lua b/Master/texmf-dist/tex/context/base/s-fonts-missing.lua
index 829fed45f9e..9a75676a908 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-missing.lua
+++ b/Master/texmf-dist/tex/context/base/s-fonts-missing.lua
@@ -14,33 +14,23 @@ local function legend(id)
local privates = c.properties.privates
if privates then
local categories = table.swapped(fonts.loggers.category_to_placeholder)
- -- context.starttabulate { "|l|c|c|l|" }
- context.starttabulate { "|l|c|l|" }
+ context.starttabulate { "|c|l|" }
context.HL()
context.NC()
- context.bold("name")
- context.NC()
context.bold("symbol")
context.NC()
- -- context.bold("node")
- -- context.NC()
- context.bold("category")
+ context.bold("name")
context.NC()
context.NR()
context.HL()
for k, v in table.sortedhash(privates) do
local tag = characters.categorytags[categories[k]]
if tag and tag ~= "" then
- context.NC()
- context(k)
context.NC()
context.dontleavehmode()
context.char(v)
context.NC()
- -- context.dontleavehmode()
- -- commands.getprivatechar(k)
- -- context.NC()
- context(string.lower(tag))
+ context(k)
context.NC()
context.NR()
end
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-missing.mkiv b/Master/texmf-dist/tex/context/base/s-fonts-missing.mkiv
index 6acef819ed7..c566f4995d3 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-missing.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fonts-missing.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\startmodule[s-fonts-missing]
+\startmodule[fonts-missing]
\registerctxluafile{s-fonts-missing}{}
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-shapes.mkiv b/Master/texmf-dist/tex/context/base/s-fonts-shapes.mkiv
index 56e3d80a745..f8eb8ffddd3 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-shapes.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fonts-shapes.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\startmodule[s-fonts-shapes]
+\startmodule[fonts-shapes]
\registerctxluafile{s-fonts-shapes}{}
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-tables.mkiv b/Master/texmf-dist/tex/context/base/s-fonts-tables.mkiv
index 98f9052ca37..e962f952d14 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-tables.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fonts-tables.mkiv
@@ -13,7 +13,7 @@
% todo: make a mtxrun --script font option
-\startmodule[s-fonts-tables]
+\startmodule[fonts-tables]
\registerctxluafile{s-fonts-tables}{}
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-vectors.lua b/Master/texmf-dist/tex/context/base/s-fonts-vectors.lua
index 1bac0ae8bad..af8042f8458 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-vectors.lua
+++ b/Master/texmf-dist/tex/context/base/s-fonts-vectors.lua
@@ -20,7 +20,7 @@ function moduledata.fonts.protrusions.showvector(specification)
local vector = vectors[specification.name or "?"]
if vector then
context.blank()
- context.startcolumns { n = specification.columns or 3 }
+ context.startcolumns { n = specification.columns or 3, balance="yes" }
context.starttabulate { "|T||cw(.5em)||" }
for unicode, values in table.sortedhash(vector) do
NC() context("%U",unicode)
@@ -65,7 +65,7 @@ function moduledata.fonts.expansions.showvector(specification)
local vector = vectors[specification.name or "?"]
if vector then
context.blank()
- context.startcolumns { n = specification.columns or 3 }
+ context.startcolumns { n = specification.columns or 3, balance="yes" }
context.starttabulate { "|T|cw(.5em)||" }
for unicode, value in table.sortedhash(vector) do
NC() context("%U",unicode)
diff --git a/Master/texmf-dist/tex/context/base/s-fonts-vectors.mkiv b/Master/texmf-dist/tex/context/base/s-fonts-vectors.mkiv
index 371a30cc569..2605fe96403 100644
--- a/Master/texmf-dist/tex/context/base/s-fonts-vectors.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-fonts-vectors.mkiv
@@ -13,7 +13,7 @@
%D This code is used in the \MKIV\ fonts manual.
-\startmodule[s-fonts-vectors]
+\startmodule[fonts-vectors]
\registerctxluafile{s-fonts-vectors}{}
diff --git a/Master/texmf-dist/tex/context/base/s-inf-03.mkiv b/Master/texmf-dist/tex/context/base/s-inf-03.mkiv
index 822173d0083..48449d6900b 100644
--- a/Master/texmf-dist/tex/context/base/s-inf-03.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-inf-03.mkiv
@@ -16,7 +16,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 16pt]
+ [MonoBold at 15pt]
\setupbodyfont
[tt,8pt]
@@ -25,7 +25,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 20pt]
+ [MonoBold at 18pt]
\setupbodyfont
[tt]
@@ -343,6 +343,10 @@ show("global","",sameglobal.global,false,_G,builtin,"darkgreen",globals,"darkblu
for k, v in table.sortedpairs(_G) do
if not skipglobal[k] and not obsolete[k] and type(v) == "table" and not marked(v) then
+
+ -- local mt = getmetatable(v)
+ -- print("!!!!!!!!!!",k,v,mt,mt and mt.__index)
+
if basiclua[k] then show(k,"basic lua",sameglobal[k],basiclua[k],v,builtin[k],"darkred", false,false,true)
elseif extralua[k] then show(k,"extra lua",sameglobal[k],extralua[k],v,builtin[k],"darkred", false,false,true)
elseif basictex[k] then show(k,"basic tex",sameglobal[k],basictex[k],v,builtin[k],"darkred", false,false,true)
diff --git a/Master/texmf-dist/tex/context/base/s-languages-hyphenation.lua b/Master/texmf-dist/tex/context/base/s-languages-hyphenation.lua
index 660392f802b..c16c5bd2d94 100644
--- a/Master/texmf-dist/tex/context/base/s-languages-hyphenation.lua
+++ b/Master/texmf-dist/tex/context/base/s-languages-hyphenation.lua
@@ -24,7 +24,7 @@ local newglue = nodepool.glue
local insert_node_after = node.insert_after
local traverse_by_id = node.traverse_id
local hyphenate = lang.hyphenate
-local find_tail = node.slide
+local find_tail = node.tail
local remove_node = nodes.remove
local tracers = nodes.tracers
diff --git a/Master/texmf-dist/tex/context/base/s-math-coverage.lua b/Master/texmf-dist/tex/context/base/s-math-coverage.lua
index 258019c9d58..5f1c7cc5a17 100644
--- a/Master/texmf-dist/tex/context/base/s-math-coverage.lua
+++ b/Master/texmf-dist/tex/context/base/s-math-coverage.lua
@@ -77,6 +77,8 @@ local chardata = characters.data
local superscripts = characters.superscripts
local subscripts = characters.subscripts
+context.writestatus("math coverage","underline: not remapped")
+
function moduledata.math.coverage.showalphabets()
context.starttabulate { "|lT|l|Tl|" }
for i=1,#styles do
@@ -121,7 +123,7 @@ function moduledata.math.coverage.showalphabets()
end
function moduledata.math.coverage.showcharacters()
- context.startcolumns()
+ context.startmixedcolumns()
context.setupalign { "nothyphenated" }
context.starttabulate { "|T|i2|Tpl|" }
for u, d in table.sortedpairs(chardata) do
@@ -148,7 +150,7 @@ function moduledata.math.coverage.showcharacters()
end
end
context.stoptabulate()
- context.stopcolumns()
+ context.stopmixedcolumns()
end
-- This is a somewhat tricky table as we need to bypass the math machinery.
diff --git a/Master/texmf-dist/tex/context/base/s-math-repertoire.mkiv b/Master/texmf-dist/tex/context/base/s-math-repertoire.mkiv
index a66d7fc6d0d..314d23868e5 100644
--- a/Master/texmf-dist/tex/context/base/s-math-repertoire.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-math-repertoire.mkiv
@@ -418,13 +418,13 @@
\continueifinputfile{s-math-repertoire.mkiv}
-\showmathcharacterssetbodyfonts{lucidanova,cambria,xits,modern,pagella,termes,bonum}
+\showmathcharacterssetbodyfonts{lucidanova,cambria,xits,modern,pagella,termes,bonum,schola}
\starttext
\doifelse {\getdocumentargument{bodyfont}} {} {
- \setupbodyfont[cambria, 12pt]
+ % \setupbodyfont[cambria, 12pt]
% \setupbodyfont[modern, 12pt]
% \setupbodyfont[lmvirtual, 12pt]
% \setupbodyfont[pxvirtual, 12pt]
@@ -437,6 +437,7 @@
% \setupbodyfont[lucidanova,12pt]
% \setupbodyfont[pagella, 12pt]
% \setupbodyfont[bonum, 12pt]
+ \setupbodyfont[schola, 12pt]
} {
diff --git a/Master/texmf-dist/tex/context/base/s-present-tiles.mkiv b/Master/texmf-dist/tex/context/base/s-present-tiles.mkiv
index 80ea5249f77..b68a34ef42b 100644
--- a/Master/texmf-dist/tex/context/base/s-present-tiles.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-present-tiles.mkiv
@@ -13,7 +13,7 @@
%D The Bacho\TeX\ 2013 style.
-\setupbodyfont[palatino,14.4pt]
+\setupbodyfont[pagella,14.4pt]
\setuppapersize[S6][S6]
@@ -64,8 +64,22 @@
-\topspace
]
-\definemeasure [topiclistfont] [\measured{topiclistwidth}/10]
-\definemeasure [titlepagefont] [2\measured{layoutunit}]
+\definemeasure [topiclistfontsize] [
+ \ifcase\numexpr\structurelistsize\relax 2.5 % 0
+ \or1.4\or1.4\or1.4\or1.4\or1.4\or1.4\or1.4\or1.4\or1.4 % 1 - 9
+ \or1.3\or1.3\or1.3\or1.3\or1.3\or1.3\or1.3 % 10 - 16
+ \or1.2\or1.2\or1.2\or1.2\or1.2 % 17 - 20
+ \else1 % 21 - 25
+ \fi
+ \bodyfontsize
+]
+
+\definemeasure [topiclistfont] [\measured{topiclistfontsize}]
+\definemeasure [topictitlefont] [1.2\measured{layoutunit}]
+\definemeasure [titlepagefont] [2\measured{layoutunit}]
+
+\predefinefont[MyTopicTitleFont][SerifBold*default at \measure{topictitlefont}]
+\predefinefont[MyTopicListFont] [SerifBold*default at \measure{topiclistfont}]
\defineframed
[topiclistentry]
@@ -73,7 +87,7 @@
height=\measure{topiclistheight},
background=color,
frame=off,
- foregroundstyle={\definedfont[Bold at \measure{topiclistfont}]},
+ foregroundstyle=MyTopicListFont,
backgroundcolor=primarycolor,
foregroundcolor=white]
@@ -108,8 +122,6 @@
\stoptopicmakeup
\stopsetups
-\predefinefont[MyTopicTitleFont][SerifBold*default at \measure{layoutunit}]
-
\definehead
[topic]
[chapter]
@@ -216,6 +228,15 @@
% end of buttons
+\defineframed
+ [conclusion]
+ [location=low,
+ width=max,
+ align={flushleft,lohi},
+ background=color,
+ backgroundcolor=white,
+ foregroundcolor=secondarycolor]
+
\startsetups [document:titlepage]
\definebodyfontenvironment
diff --git a/Master/texmf-dist/tex/context/base/s-sql-tables.mkiv b/Master/texmf-dist/tex/context/base/s-sql-tables.mkiv
index 6f507e8b595..12257087492 100644
--- a/Master/texmf-dist/tex/context/base/s-sql-tables.mkiv
+++ b/Master/texmf-dist/tex/context/base/s-sql-tables.mkiv
@@ -13,7 +13,7 @@
% for the moment no helpers
-\startmodule[s-sql-tables]
+\startmodule[sql-tables]
\registerctxluafile{s-sql-tables}{}
diff --git a/Master/texmf-dist/tex/context/base/s-youless.mkiv b/Master/texmf-dist/tex/context/base/s-youless.mkiv
new file mode 100644
index 00000000000..247eb5f648f
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/s-youless.mkiv
@@ -0,0 +1,170 @@
+%D \module
+%D [ file=s-youless,
+%D version=2013.11.12,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Youless Graphics,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is experimental code. When I have collected enough data I will make the
+%D graphics nicer and provide some more.
+%D
+%D The Jouless can serve web pages but there is not much detail in them. They are also
+%D somewhat bad \HTML, with unquoted attributes and the like. We don't need them anyway,
+%D as we can also fetch the data directly. The data is collected using a dedicated helper
+%D script (of course we could also call it as a module). The data is fetched from the
+%D Jouless device using queries (currently we use json, but more direct parsing of the
+%D data might be more efficient). The data is converted into a proper \LUA\ table and
+%D saved (merged).
+
+% in cronjob on one of the servers:
+%
+% mtxrun --script youless --collect --host=192.168.2.50 --watt youless-watt.lua
+% mtxrun --script youless --collect --host=192.168.2.50 --kwh youless-kwh.lua
+
+\startluacode
+
+ require("util-you")
+
+ moduledata.youless = { }
+
+ function moduledata.youless.kwh(specification)
+ -- todo
+ end
+
+ function moduledata.youless.watt(specification)
+
+ local year = tonumber(specification.year) or os.today().year
+ local data = table.load(specification.filename or "youless-watt.lua")
+
+ if not data or data.variant ~= "watt" then
+ context("invalid variant")
+ return
+ end
+
+ utilities.youless.analyze(data)
+
+ -- for the moment no specific font scaling
+
+ local years = data.years
+
+ if not years then
+ context("no years")
+ return
+ end
+
+ for y=year,year do
+
+ local year = years[y]
+ local scale = 20
+ local mark = 3
+
+ for m=1,12 do
+ local month = year.months[m]
+ if month then
+ context.startMPpage { offset = "10pt" }
+ context("linecap := butt; pickup pencircle scaled .5")
+
+ for i=0,(math.div(year.maxwatt,1000)+1)*1000,100 do
+ context("draw (%s,%s) -- (%s,%s) withcolor .6white ;",0,i/scale,31 * 24,i/scale)
+ end
+
+ context("draw (0,%s) -- (31 * 24,%s) dashed dashpattern(on 6 off 6) withcolor darkgreen withpen pencircle scaled 1 ;",year.watt /scale,year.watt /scale)
+ context("draw (0,%s) -- (31 * 24,%s) dashed dashpattern(off 6 on 6) withcolor darkred withpen pencircle scaled 1 ;",month.watt/scale,month.watt/scale)
+
+ local days = month.days
+ if days then
+ local nd = os.nofdays(y,m)
+ for d=1,nd do
+ local day = days[d]
+ local xoffset = (d-1) * 24
+ local wd = os.weekday(d,m,y)
+ local weekend = wd == 1 or wd == 7
+ if not weekend then
+ -- okay
+ elseif mark == 1 then
+ context("draw (%s,%s) -- (%s,%s) ; ",xoffset, -17.5,xoffset, -32.5)
+ context("draw (%s,%s) -- (%s,%s) ; ",xoffset+24,-17.5,xoffset+24,-32.5)
+ elseif mark == 2 then
+ context("draw (%s,%s) -- (%s,%s) ; ",xoffset, -17.5,xoffset+24,-17.5)
+ context("draw (%s,%s) -- (%s,%s) ; ",xoffset, -32.5,xoffset+24,-32.5)
+ elseif mark == 3 then
+ context("draw unitsquare xysized (%s,%s) shifted (%s,%s) ; ",24,15,xoffset,-32.5)
+ end
+ context([[draw textext("%s") shifted (%s,%s) ; ]],d,xoffset + 12,-25)
+ if day then
+ for h=0,23 do
+ local hours = day.hours
+ if hours then
+ local hour = hours[h]
+ if hour then
+ local dx = xoffset + h
+ local dy = hour.watt/scale
+ local dm = hour.maxwatt/scale
+ context("draw (%s,%s) -- (%s,%s) withcolor %s ; ",dx, 0,dx,dy,weekend and "darkmagenta" or "darkblue")
+ context("draw (%s,%s) -- (%s,%s) withcolor %s ; ",dx,dy,dx,dm,"darkgray")
+ end
+ end
+ end
+ end
+ end
+ for d=0,30 do
+ local xoffset = d * 24
+ context("draw (%s,%s) -- (%s,%s) withcolor darkgray ; ",xoffset+ 0,0,xoffset+ 0,-10)
+ context("draw (%s,%s) -- (%s,%s) withcolor darkgray ; ",xoffset+ 6,0,xoffset+ 6,-2.5)
+ context("draw (%s,%s) -- (%s,%s) withcolor darkgray ; ",xoffset+12,0,xoffset+12,-5)
+ context("draw (%s,%s) -- (%s,%s) withcolor darkgray ; ",xoffset+18,0,xoffset+18,-2.5)
+ end
+ local xoffset = 31 * 24
+ context("draw (%s,%s) -- (%s,%s) withcolor darkgray ; ",xoffset,0,xoffset,-10)
+ end
+
+ local max = (math.div(year.maxwatt,1000)+1)
+
+ for i=0,max*1000,1000 do
+ context([[draw textext.lft("%s") shifted (%s,%s) ; ]],i,-10,i/scale)
+ context("draw (%s,%s) -- (%s,%s) withcolor .2white ;",0,i/scale,31 * 24,i/scale)
+ end
+
+ context([[draw textext("\strut\month{%s}\enspace%s") shifted (%s,%s) ; ]],m, y, 31 * 24 / 2, -50)
+ context([[draw textext.lft("watt") shifted (%s,%s) ; ]],-10,-25)
+
+ context.stopMPpage()
+ else
+ -- maybe placeholder
+ end
+ end
+
+ end
+
+ end
+
+\stopluacode
+
+\continueifinputfile{s-youless.mkiv}
+
+\setupbodyfont[dejavu] % smaller sizes also look ok
+
+% printer (oce) : > 3000 W startup (900 W idle, 2000 W printing)
+% coffeemaker : 1500 W when heating
+
+% baseline day : 2250 W (servers, airco, workstations, routers, switches, heating, etc)
+% baseline night : 1750 W
+
+\starttext
+
+ \startluacode
+
+ -- os.execute([[mtxrun --script youless --collect --watt "c:/data/system/youless/data/youless-watt.lua"]])
+ -- os.execute([[mtxrun --script youless --collect --watt --nobackup "c:/data/system/youless/data/youless-watt.lua"]])
+ -- moduledata.youless.watt { year = 2013, filename = "c:/data/system/youless/data/youless-watt.lua" }
+
+ moduledata.youless.watt { year = 2013, filename = "youless-watt.lua" }
+
+ \stopluacode
+
+\stoptext
diff --git a/Master/texmf-dist/tex/context/base/scrn-but.lua b/Master/texmf-dist/tex/context/base/scrn-but.lua
index e49372ce9fd..74f6e0cd9af 100644
--- a/Master/texmf-dist/tex/context/base/scrn-but.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-but.lua
@@ -6,6 +6,9 @@ if not modules then modules = { } end modules ['scrn-but'] = {
license = "see context related readme files"
}
+local commands = commands
+local context = context
+
local f_two_colon = string.formatters["%s:%s"]
function commands.registerbuttons(tag,register,language)
diff --git a/Master/texmf-dist/tex/context/base/scrn-but.mkvi b/Master/texmf-dist/tex/context/base/scrn-but.mkvi
index fd2da9e0841..f8b236c522a 100644
--- a/Master/texmf-dist/tex/context/base/scrn-but.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-but.mkvi
@@ -217,12 +217,12 @@
{\global\settrue\c_scrn_button_skipped}
\def\scrn_button_make_normal#currentparameter#inheritedframed#letparameter#setparameter#text%
- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ {\ctxcommand{injectcurrentreference()}%
\hbox attr \referenceattribute \lastreferenceattribute
{#inheritedframed{\ignorespaces#text\removeunwantedspaces}}}
\def\scrn_button_make_contrast#currentparameter#inheritedframed#letparameter#setparameter#text%
- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ {\ctxcommand{injectcurrentreference()}%
\hbox attr \referenceattribute \lastreferenceattribute
{#setparameter\c!foregroundcolor{#currentparameter\c!contrastcolor}%
#inheritedframed{\ignorespaces#text\removeunwantedspaces}}}
diff --git a/Master/texmf-dist/tex/context/base/scrn-fld.lua b/Master/texmf-dist/tex/context/base/scrn-fld.lua
index 9836cbebea3..69480b88794 100644
--- a/Master/texmf-dist/tex/context/base/scrn-fld.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-fld.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['scrn-fld'] = {
local variables = interfaces.variables
local v_yes = variables.yes
+local texsetbox = tex.setbox
+
local fields = { }
interactions.fields = fields
@@ -43,7 +45,7 @@ commands.definefieldset = defineset
commands.clonefield = clone
function commands.insertfield(name,specification)
- tex.box["b_scrn_field_body"] = insert(name,specification)
+ texsetbox("b_scrn_field_body",insert(name,specification))
end
 -- (for the moment) only tex interface
diff --git a/Master/texmf-dist/tex/context/base/scrn-hlp.lua b/Master/texmf-dist/tex/context/base/scrn-hlp.lua
index 5f8368c6dc3..d344ce2802c 100644
--- a/Master/texmf-dist/tex/context/base/scrn-hlp.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-hlp.lua
@@ -11,6 +11,9 @@ local format = string.format
local help = { }
interactions.help = help
+local context = context
+local commands = commands
+
local a_help = attributes.private("help")
local copy_nodelist = node.copy_list
@@ -18,6 +21,8 @@ local hpack_nodelist = node.hpack
local register_list = nodes.pool.register
+local texgetbox = tex.getbox
+
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -48,7 +53,7 @@ function help.register(number,name,box)
interactions.javascripts.setpreamble("HelpTexts",helpscript)
helpscript = false
end
- local b = copy_nodelist(tex.box[box])
+ local b = copy_nodelist(texgetbox(box))
register_list(b)
data[number] = b
if name and name ~= "" then
@@ -81,7 +86,7 @@ end
function help.collect(box)
if next(data) then
- return collect(tex.box[box].list)
+ return collect(texgetbox(box).list)
end
end
diff --git a/Master/texmf-dist/tex/context/base/scrn-wid.lua b/Master/texmf-dist/tex/context/base/scrn-wid.lua
index 4ad46761ee8..5b319b07ea7 100644
--- a/Master/texmf-dist/tex/context/base/scrn-wid.lua
+++ b/Master/texmf-dist/tex/context/base/scrn-wid.lua
@@ -9,19 +9,27 @@ if not modules then modules = { } end modules ['scrn-wid'] = {
interactions = interactions or { }
local interactions = interactions
-local attachments = { }
-local comments = { }
-local soundclips = { }
-local renderings = { }
-local linkedlists = { }
+local context = context
+
+local allocate = utilities.storage.allocate
+
+local attachments = allocate()
+local comments = allocate()
+local soundclips = allocate()
+local renderings = allocate()
+local linkedlists = allocate()
interactions.attachments = attachments
interactions.soundclips = soundclips
interactions.renderings = renderings
interactions.linkedlists = linkedlists
+local texsetbox = tex.setbox
+
local jobpasses = job.passes
+local texgetcount = tex.getcount
+
local codeinjections = backends.codeinjections
local nodeinjections = backends.nodeinjections
@@ -103,7 +111,7 @@ end
commands.registerattachment = attachments.register
function commands.insertattachment(specification)
- tex.box["b_scrn_attachment_link"] = attachments.insert(specification)
+ texsetbox("b_scrn_attachment_link",(attachments.insert(specification)))
end
-- Comment
@@ -117,7 +125,7 @@ function comments.insert(specification)
end
function commands.insertcomment(specification)
- tex.box["b_scrn_comment_link"] = comments.insert(specification)
+ texsetbox("b_scrn_comment_link",(comments.insert(specification)))
end
-- Soundclips
@@ -195,7 +203,7 @@ end
function commands.enhancelinkedlist(tag,n)
local ll = jobpasses.gettobesaved(tag)
if ll then
- ll[n] = texcount.realpageno
+ ll[n] = texgetcount("realpageno")
end
end
diff --git a/Master/texmf-dist/tex/context/base/scrn-wid.mkvi b/Master/texmf-dist/tex/context/base/scrn-wid.mkvi
index ae5f7c5567b..8dcc7a86a23 100644
--- a/Master/texmf-dist/tex/context/base/scrn-wid.mkvi
+++ b/Master/texmf-dist/tex/context/base/scrn-wid.mkvi
@@ -16,6 +16,7 @@
\registerctxluafile{scrn-wid}{1.001}
% todo: expansion in comments (default is expanded)
+% todo: check renderings ... acrobat crashes too easily on missing one
\unprotect
@@ -400,7 +401,7 @@
{\doifassignmentelse{#title}
{\setupcurrentcomment[#title]}
{\setupcurrentcomment[\c!title=#title,#settings]}%
- \ctxlua{buffers.assign("\v!comment",\!!bs#text\!!es)}% todo: expansion control, but expanded by default (xml)
+ \ctxcommand{assignbuffer("\v!comment",\!!bs#text\!!es)}% todo: expansion control, but expanded by default (xml)
\scrn_comment_inject
\ignorespaces}
@@ -566,8 +567,8 @@
\definereference[PauseCurrentRendering] [\v!PauseRendering {\currentrendering}]
\definereference[ResumeCurrentRendering][\v!ResumeRendering{\currentrendering}]
-\def\useexternalrendering{\doquadrupleempty\scrn_rendering_use}
-\def\setinternalrendering{\dodoubleempty \scrn_rendering_set}
+\unexpanded\def\useexternalrendering{\doquadrupleempty\scrn_rendering_use}
+\unexpanded\def\setinternalrendering{\dodoubleempty \scrn_rendering_set}
\def\scrn_rendering_use[#tag][#mime][#file][#option]%
{\ctxcommand{registerrendering{
@@ -578,7 +579,7 @@
option = "#option",
}}}
-\def\scrn_rendering_set[#tag][#option]% {content}
+\def\scrn_rendering_set[#tag][#option]% {content} % crappy
{\bgroup
\dowithnextbox
{\ctxcommand{registerrendering{
@@ -615,17 +616,9 @@
\unexpanded\def\placerenderingwindow
{\dodoubleempty\scrn_rendering_place_window}
-\def\scrn_rendering_place_window[#window][#rendering]%
+\def\scrn_rendering_place_window[#window][#rendering]% do all in lua
{\bgroup
\edef\currentrendering{\ifsecondargument#rendering\else#window\fi}%
- \doifelse{\renderingtype\currentrendering}{internal} % an object
- {\getobjectdimensions{IRO}\currentrendering
- \d_scrn_rendering_height\dimexpr\objectheight+\objectdepth\relax
- \d_scrn_rendering_width\objectwidth\relax
- \dogetobjectreferencepage{IRO}\currentrendering\m_scrn_rendering_page}%
- {\d_scrn_rendering_height\vsize
- \d_scrn_rendering_width\hsize
- \let\m_scrn_rendering_page\realpageno}%
% create fall back if needed
\edef\currentrenderingwindow{\namedrenderingwindowparameter{#window}\c!width}% stupid test, we need a proper one here
\ifx\currentrenderingwindow\empty
@@ -634,6 +627,21 @@
\else
\edef\currentrenderingwindow{#window}%
\fi
+ \edef\currentrenderingtype{\renderingtype\currentrendering}%
+ \ifx\currentrenderingtype\s!internal
+ \getobjectdimensions{IRO}\currentrendering
+ \d_scrn_rendering_height\dimexpr\objectheight+\objectdepth\relax
+ \d_scrn_rendering_width\objectwidth\relax
+ \dogetobjectreferencepage{IRO}\currentrendering\m_scrn_rendering_page
+ \else\ifx\currentrenderingwindow\s!default
+ \d_scrn_rendering_height\vsize
+ \d_scrn_rendering_width \hsize
+ \let\m_scrn_rendering_page\realpageno
+ \else
+ \d_scrn_rendering_height\renderingwindowparameter\c!height
+ \d_scrn_rendering_width \renderingwindowparameter\c!width
+ \let\m_scrn_rendering_page\realpageno
+ \fi\fi
% todo:
% \handlereferenceactions{\renderingwindowparameter\c!openpageaction }\dosetuprenderingopenpageaction
% \handlereferenceactions{\renderingwindowparameter\c!closepageaction}\dosetuprenderingclosepageaction
diff --git a/Master/texmf-dist/tex/context/base/scrp-cjk.lua b/Master/texmf-dist/tex/context/base/scrp-cjk.lua
index f7167b45c0b..9050da6be20 100644
--- a/Master/texmf-dist/tex/context/base/scrp-cjk.lua
+++ b/Master/texmf-dist/tex/context/base/scrp-cjk.lua
@@ -14,13 +14,29 @@ if not modules then modules = { } end modules ['scrp-cjk'] = {
-- sense either because otherwise a wanted space at the end of a
-- line would have to be a hard coded ones.
-local utfchar = utf.char
-
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local remove_node = nodes.remove
-
-local nodepool = nodes.pool
+local utfchar = utf.getchar
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local copy_node = nuts.copy
+local remove_node = nuts.remove
+local traverse_id = nuts.traverse_id
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
local new_penalty = nodepool.penalty
@@ -44,6 +60,8 @@ local fontdata = fonthashes.identifiers
local quaddata = fonthashes.quads
local spacedata = fonthashes.spaces
+local decomposed = characters.hangul.decomposed
+
local trace_details = false trackers.register("scripts.details", function(v) trace_details = v end)
local report_details = logs.reporter("scripts","detail")
@@ -84,20 +102,20 @@ end
-- at font definition time and/or just assume a correct font
local function trace_detail(current,what)
- local prev = current.prev
- local c_id = current.id
- local p_id = prev and prev.id
+ local prev = getprev(current)
+ local c_id = getid(current)
+ local p_id = prev and getid(prev)
if c_id == glyph_code then
- local c_ch = current.char
+ local c_ch = getchar(current)
if p_id == glyph_code then
- local p_ch = p_id and prev.char
+ local p_ch = p_id and getchar(prev)
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
else
report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
end
else
if p_id == glyph_code then
- local p_ch = p_id and prev.char
+ local p_ch = p_id and getchar(prev)
report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
else
report_details("[%s]",what)
@@ -106,8 +124,8 @@ local function trace_detail(current,what)
end
local function trace_detail_between(p,n,what)
- local p_ch = p.char
- local n_ch = n.char
+ local p_ch = getchar(p)
+ local n_ch = getchar(n)
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch])
end
@@ -423,29 +441,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = first.prev, upcoming
+ local p, n = getprev(first), upcoming
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -490,6 +508,41 @@ scripts.installmethod {
},
}
+function scripts.decomposehangul(head)
+ local head = tonut(head)
+ local done = false
+ for current in traverse_id(glyph_code,head) do
+ local lead_consonant, medial_vowel, tail_consonant = decomposed(getchar(current))
+ if lead_consonant then
+ setfield(current,"char",lead_consonant)
+ local m = copy_node(current)
+ setfield(m,"char",medial_vowel)
+ head, current = insert_node_after(head,current,m)
+ if tail_consonant then
+ local t = copy_node(current)
+ setfield(t,"char",tail_consonant)
+ head, current = insert_node_after(head,current,t)
+ end
+ done = true
+ end
+ end
+ return tonode(head), done
+end
+
+-- nodes.tasks.prependaction("processors","normalizers","scripts.decomposehangul")
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+registerotffeature {
+ name = "decomposehangul",
+ description = "decompose hangul",
+ processors = {
+ position = 1,
+ node = scripts.decomposehangul,
+ }
+}
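+
+-- A possible way to enable this from the font end (a sketch; the feature set and font
+-- names are made up, the commented prependaction above being the alternative route):
+--
+-- \definefontfeature[decomposed][default][decomposehangul=yes]
+-- \definedfont[somehangulfont*decomposed]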
+
-- Chinese: hanzi
local chinese_0 = {
@@ -644,29 +697,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = first.prev, upcoming
+ local p, n = getprev(first), upcoming
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -866,34 +919,32 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
-
--- elseif id == math_code then
--- upcoming = end_of_math(current).next
--- previous = "start"
-
+ -- elseif id == math_code then
+ -- upcoming = getnext(end_of_math(current))
+ -- previous = "start"
else -- glue
- local p, n = first.prev, upcoming -- we should remember prev
+ local p, n = getprev(first), upcoming -- we should remember prev
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -902,17 +953,17 @@ local function process(head,first,last)
or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
previous = "start"
else -- if head ~= first then
-if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check?
- -- for the moment no distinction possible between space and userskip
- local w = first.spec.width
- local s = spacedata[p.font]
- if w == s then -- could be option
- if trace_details then
- trace_detail_between(p,n,"space removed")
- end
- remove_node(head,first,true)
- end
-end
+ if id == glue_code and getsubtype(first) == userskip_code then -- also scriptstatus check?
+ -- for the moment no distinction possible between space and userskip
+ local w = getfield(getfield(first,"spec"),"width")
+ local s = spacedata[getfont(p)]
+ if w == s then -- could be option
+ if trace_details then
+ trace_detail_between(p,n,"space removed")
+ end
+ remove_node(head,first,true)
+ end
+ end
previous = pcjk
-- else
-- previous = pcjk
@@ -948,4 +999,3 @@ scripts.installmethod {
},
},
}
-
diff --git a/Master/texmf-dist/tex/context/base/scrp-eth.lua b/Master/texmf-dist/tex/context/base/scrp-eth.lua
index 597afa1b558..8ecbce52295 100644
--- a/Master/texmf-dist/tex/context/base/scrp-eth.lua
+++ b/Master/texmf-dist/tex/context/base/scrp-eth.lua
@@ -9,9 +9,17 @@ if not modules then modules = { } end modules ['scrp-eth'] = {
-- at some point I will review the script code but for the moment we
-- do it this way; so space settings like with cjk yet
-local insert_node_before = node.insert_before
+local nuts = nodes.nuts
-local nodepool = nodes.pool
+local getnext = nuts.getnext
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getattr = nuts.getattr
+
+local insert_node_before = nuts.insert_before
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_penalty = nodepool.penalty
@@ -37,13 +45,13 @@ local inter_character_stretch_factor = 1
local inter_character_shrink_factor = 1
local function space_glue(current)
- local data = numbertodataset[current[a_scriptinjection]]
+ local data = numbertodataset[getattr(current,a_scriptinjection)]
if data then
inter_character_space_factor = data.inter_character_space_factor or 1
inter_character_stretch_factor = data.inter_character_stretch_factor or 1
inter_character_shrink_factor = data.inter_character_shrink_factor or 1
end
- local font = current.font
+ local font = getfont(current)
if lastfont ~= font then
local pf = parameters[font]
space = pf.space
@@ -104,9 +112,9 @@ local function process(head,first,last)
local injector = false
local current = first
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local scriptstatus = current[a_scriptstatus]
+ local scriptstatus = getattr(current,a_scriptstatus)
local category = numbertocategory[scriptstatus]
if injector then
local action = injector[category]
@@ -121,7 +129,7 @@ local function process(head,first,last)
if current == last then
break
else
- current = current.next
+ current = getnext(current)
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/scrp-ini.lua b/Master/texmf-dist/tex/context/base/scrp-ini.lua
index 18f86475ffc..a6bfe4cf9e3 100644
--- a/Master/texmf-dist/tex/context/base/scrp-ini.lua
+++ b/Master/texmf-dist/tex/context/base/scrp-ini.lua
@@ -11,15 +11,16 @@ if not modules then modules = { } end modules ['scrp-ini'] = {
local attributes, nodes, node = attributes, nodes, node
-local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
-local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
+local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
+local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
+local trace_splitting = false trackers.register("scripts.splitting", function(v) trace_splitting = v end)
+local trace_splitdetail = false trackers.register("scripts.splitting.detail", function(v) trace_splitdetail = v end)
local report_preprocessing = logs.reporter("scripts","preprocessing")
+local report_splitting = logs.reporter("scripts","splitting")
-local utfchar = utf.char
-
-local first_glyph = node.first_glyph or node.first_character
-local traverse_id = node.traverse_id
+local utfbyte, utfsplit = utf.byte, utf.split
+local gmatch = string.gmatch
local texsetattribute = tex.setattribute
@@ -29,6 +30,9 @@ local unsetvalue = attributes.unsetvalue
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
+local emwidths = fonts.hashes.emwidths
+local exheights = fonts.hashes.exheights
+
local a_scriptinjection = attributes.private('scriptinjection')
local a_scriptsplitting = attributes.private('scriptsplitting')
local a_scriptstatus = attributes.private('scriptstatus')
@@ -41,6 +45,28 @@ local setmetatableindex = table.setmetatableindex
local enableaction = nodes.tasks.enableaction
local disableaction = nodes.tasks.disableaction
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local first_glyph = nuts.first_glyph
+local traverse_id = nuts.traverse_id
+
+local nodepool = nuts.pool
+
+local new_glue = nodepool.glue
+local new_rule = nodepool.rule
+local new_penalty = nodepool.penalty
+----- new_gluespec = nodepool.gluespec
+
scripts = scripts or { }
local scripts = scripts
@@ -198,7 +224,7 @@ local function provide(t,k)
return v
end
-setmetatableindex(hash,provide)
+setmetatableindex(hash,provide) -- should come from char-def
scripts.hash = hash
@@ -385,7 +411,7 @@ scripts.numbertocategory = numbertocategory
local function colorize(start,stop)
for n in traverse_id(glyph_code,start) do
- local kind = numbertocategory[n[a_scriptstatus]]
+ local kind = numbertocategory[getattr(n,a_scriptstatus)]
if kind then
local ac = scriptcolors[kind]
if ac then
@@ -417,16 +443,17 @@ end
-- we can have a fonts.hashes.originals
function scripts.injectors.handler(head)
+ head = tonut(head)
local start = first_glyph(head) -- we already have glyphs here (subtype 1)
if not start then
- return head, false
+ return tonode(head), false
else
local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
local done, first, last, ok = false, nil, nil, false
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local a = start[a_scriptinjection]
+ local a = getattr(start,a_scriptinjection)
if a then
if a ~= last_a then
if first then
@@ -448,7 +475,7 @@ function scripts.injectors.handler(head)
normal_process = handler.injector
end
if normal_process then
- local f = start.font
+ local f = getfont(start)
if f ~= lastfont then
originals = fontdata[f].resources
if resources then
@@ -458,13 +485,13 @@ function scripts.injectors.handler(head)
end
lastfont = f
end
- local c = start.char
+ local c = getchar(start)
if originals then
c = originals[c] or c
end
local h = hash[c]
if h then
- start[a_scriptstatus] = categorytonumber[h]
+ setattr(start,a_scriptstatus,categorytonumber[h])
if not first then
first, last = start, start
else
@@ -525,7 +552,7 @@ function scripts.injectors.handler(head)
first, last = nil, nil
end
end
- start = start.next
+ start = getnext(start)
end
if ok then
if trace_analyzing then
@@ -538,14 +565,294 @@ function scripts.injectors.handler(head)
end
done = true
end
- return head, done
+ return tonode(head), done
+ end
+end
+
+-- kind of experimental .. might move to it's own module
+
+-- function scripts.splitters.handler(head)
+-- return head, false
+-- end
+
+local function addwords(tree,data)
+ if not tree then
+ tree = { }
+ end
+ for word in gmatch(data,"%S+") do
+ local root = tree
+ local list = utfsplit(word,true)
+ for i=1,#list do
+ local l = utfbyte(list[i])
+ local r = root[l]
+ if not r then
+ r = { }
+ root[l] = r
+ end
+ if i == #list then
+ r.final = word -- true -- could be something else, like word in case of tracing
+ else
+ root = r
+ end
+ end
end
+ return tree
end
-function scripts.splitters.handler(head)
- return head, false
+local loaded = { }
+
+function splitters.load(handler,files)
+ local files = handler.files
+ local tree = handler.tree or { }
+ handler.tree = tree
+ if not files then
+ return
+ elseif type(files) == "string" then
+ files = { files }
+ handler.files = files
+ end
+ if trace_splitting then
+ report_splitting("loading splitter data for language/script %a",handler.name)
+ end
+ loaded[handler.name or "unknown"] = (loaded[handler.name or "unknown"] or 0) + 1
+ statistics.starttiming(loaded)
+ for i=1,#files do
+ local filename = files[i]
+ local fullname = resolvers.findfile(filename)
+ if fullname == "" then
+ fullname = resolvers.findfile(filename .. ".gz")
+ end
+ if fullname ~= "" then
+ if trace_splitting then
+ report_splitting("loading file %a",fullname)
+ end
+ local suffix, gzipped = gzip.suffix(fullname)
+ if suffix == "lua" then
+ local specification = table.load(fullname,gzipped and gzip.load)
+ if specification then
+ local lists = specification.lists
+ if lists then
+ for i=1,#lists do
+ local entry = lists[i]
+ local data = entry.data
+ if data then
+ if entry.compression == "zlib" then
+ data = zlib.decompress(data)
+ if entry.length and entry.length ~= #data then
+ report_splitting("compression error in file %a",fullname)
+ end
+ end
+ if data then
+ addwords(tree,data)
+ end
+ end
+ end
+ end
+ end
+ else
+ local data = gzipped and io.loadgzip(fullname) or io.loaddata(fullname)
+ if data then
+ addwords(tree,data)
+ end
+ end
+ else
+ report_splitting("unknown file %a",filename)
+ end
+ end
+ statistics.stoptiming(loaded)
+ return tree
end
+statistics.register("loaded split lists", function()
+ if next(loaded) then
+ return string.format("%s, load time: %s",table.sequenced(loaded),statistics.elapsedtime(loaded))
+ end
+end)
+
+-- function splitters.addlist(name,filename)
+-- local handler = scripts.handlers[name]
+-- if handler and filename then
+-- local files = handler.files
+-- if not files then
+-- files = { }
+-- elseif type(files) == "string" then
+-- files = { files }
+-- end
+-- handler.files = files
+-- if type(filename) == "string" then
+-- filename = utilities.parsers.settings_to_array(filename)
+-- end
+-- if type(filename) == "table" then
+-- for i=1,#filename do
+-- files[#files+1] = filenames[i]
+-- end
+-- end
+-- end
+-- end
+--
+-- commands.setscriptsplitterlist = splitters.addlist
+
+local categories = characters.categories or { }
+
+local function hit(root,head)
+ local current = getnext(head)
+ local lastrun = false
+ local lastfinal = false
+ while current and getid(current) == glyph_code do
+ local char = getchar(current)
+ local newroot = root[char]
+ if newroot then
+ local final = newroot.final
+ if final then
+ lastrun = current
+ lastfinal = final
+ end
+ root = newroot
+ elseif categories[char] == "mn" then
+ -- continue
+ else
+ return lastrun, lastfinal
+ end
+ current = getnext(current)
+ end
+ if lastrun then
+ return lastrun, lastfinal
+ end
+end
+
+local tree, attr, proc
+
+function splitters.handler(head) -- todo: also first_glyph test
+ head = tonut(head)
+ local current = head
+ local done = false
+ while current do
+ if getid(current) == glyph_code then
+ local a = getattr(current,a_scriptsplitting)
+ if a then
+ if a ~= attr then
+ local handler = numbertohandler[a]
+ tree = handler.tree or { }
+ attr = a
+ proc = handler.splitter
+ end
+ if proc then
+ local root = tree[getchar(current)]
+ if root then
+ -- we don't check for attributes in the hitter (yet)
+ local last, final = hit(root,current)
+ if last then
+ local next = getnext(last)
+ if next and getid(next) == glyph_code then
+ local nextchar = getchar(next)
+ if tree[nextchar] then
+ if trace_splitdetail then
+ if type(final) == "string" then
+ report_splitting("advance %s processing between <%s> and <%c>","with",final,nextchar)
+ else
+ report_splitting("advance %s processing between <%c> and <%c>","with",char,nextchar)
+ end
+ end
+ head, current = proc(handler,head,current,last,1)
+ done = true
+ else
+ if trace_splitdetail then
+ -- could be punctuation
+ if type(final) == "string" then
+ report_splitting("advance %s processing between <%s> and <%c>","without",final,nextchar)
+ else
+ report_splitting("advance %s processing between <%c> and <%c>","without",char,nextchar)
+ end
+ end
+ head, current = proc(handler,head,current,last,2)
+ done = true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ return tonode(head), done
+end
+
+local function marker(head,current,font,color) -- could become: nodes.tracers.marker
+ local ex = exheights[font]
+ local em = emwidths [font]
+ head, current = insert_node_after(head,current,new_penalty(10000))
+ head, current = insert_node_after(head,current,new_glue(-0.05*em))
+ head, current = insert_node_after(head,current,new_rule(0.05*em,1.5*ex,0.5*ex))
+ setnodecolor(current,color)
+ return head, current
+end
+
+-- local function process(handler,head,first,last)
+-- dataset = numbertodataset[first[a_scriptsplitting]]
+-- stretch = emwidths[first.font]*dataset.inter_word_stretch_factor
+-- return insert_node_after(head,last,new_glue(0,stretch))
+-- end
+--
+-- local cache = { } table.setmetatableindex(cache,function(t,k)
+-- local v = new_gluespec(0,k)
+-- nodepool.register(v)
+-- t[k] = v
+-- return v
+-- end)
+-- return insert_node_after(head,last,new_glue(cache[last_s]))
+
+local last_a, last_f, last_s, last_q
+
+function splitters.insertafter(handler,head,first,last,detail)
+ local a = getattr(first,a_scriptsplitting)
+ local f = getfont(first)
+ if a ~= last_a or f ~= last_f then
+ last_s = emwidths[f] * numbertodataset[a].inter_word_stretch_factor
+ last_a = a
+ last_f = f
+ end
+ if trace_splitting then
+ head, last = marker(head,last,f,detail == 2 and "trace:r" or "trace:g")
+ end
+ if ignore then
+ return head, last
+ else
+ return insert_node_after(head,last,new_glue(0,last_s))
+ end
+end
+
+-- word-xx.lua:
+--
+-- return {
+-- comment = "test",
+-- copyright = "not relevant",
+-- language = "en",
+-- timestamp = "2013-05-20 14:15:21",
+-- version = "1.00",
+-- lists = {
+-- {
+-- -- data = "we thrive information in thick worlds because of our marvelous and everyday capacity to select edit single out structure highlight group pair merge harmonize synthesize focus organize condense reduce boil down choose categorize catalog classify list abstract scan look into idealize isolate discriminate distinguish screen pigeonhole pick over sort integrate blend inspect filter lump skip smooth chunk average approximate cluster aggregate outline summarize itemize review dip into flip through browse glance into leaf through skim refine enumerate glean synopsize winnow the wheat from the chaff and separate the sheep from the goats",
+-- data = "abstract aggregate and approximate average because blend boil browse capacity catalog categorize chaff choose chunk classify cluster condense dip discriminate distinguish down edit enumerate everyday filter flip focus from glance glean goats group harmonize highlight idealize in information inspect integrate into isolate itemize leaf list look lump marvelous merge of organize our out outline over pair pick pigeonhole reduce refine review scan screen select separate sheep single skim skip smooth sort structure summarize synopsize synthesize the thick thrive through to we wheat winnow worlds",
+-- },
+-- },
+-- }
+
+scripts.installmethod {
+ name = "test",
+ splitter = splitters.insertafter,
+ initializer = splitters.load,
+ files = {
+ -- "scrp-imp-word-test.lua",
+ "word-xx.lua",
+ },
+ datasets = {
+ default = {
+ inter_word_stretch_factor = 0.25, -- of quad
+ },
+ },
+}
+
-- new plugin:
local registercontext = fonts.specifiers.registercontext
@@ -576,15 +883,15 @@ setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
-- playing nice
function autofontfeature.handler(head)
- for n in traverse_id(glyph_code,head) do
- -- if n[a_scriptinjection] then
+ for n in traverse_id(glyph_code,tonut(head)) do
+ -- if getattr(n,a_scriptinjection) then
-- -- already tagged by script feature, maybe some day adapt
-- else
- local char = n.char
+ local char = getchar(n)
local script = otfscripts[char]
if script then
- local dynamic = n[0] or 0
- local font = n.font
+ local dynamic = getattr(n,0) or 0
+ local font = getfont(n)
if dynamic > 0 then
local slot = cache_yes[font]
local attr = slot[script]
@@ -610,7 +917,7 @@ function autofontfeature.handler(head)
end
end
if attr ~= 0 then
- n[0] = attr
+ setattr(n,0,attr)
-- maybe set scriptinjection when associated
end
end
diff --git a/Master/texmf-dist/tex/context/base/scrp-ini.mkiv b/Master/texmf-dist/tex/context/base/scrp-ini.mkiv
index fe62295bb62..4a27dd8e2bf 100644
--- a/Master/texmf-dist/tex/context/base/scrp-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/scrp-ini.mkiv
@@ -16,6 +16,7 @@
\registerctxluafile{scrp-ini}{1.001}
\registerctxluafile{scrp-cjk}{1.001}
\registerctxluafile{scrp-eth}{1.001}
+\registerctxluafile{scrp-tha}{1.001}
\definesystemattribute[scriptinjection][public]
\definesystemattribute[scriptsplitting][public]
diff --git a/Master/texmf-dist/tex/context/base/scrp-tha.lua b/Master/texmf-dist/tex/context/base/scrp-tha.lua
new file mode 100644
index 00000000000..ec5df07c075
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/scrp-tha.lua
@@ -0,0 +1,57 @@
+if not modules then modules = { } end modules ['scrp-tha'] = {
+ version = 1.001,
+ comment = "companion to scrp-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module needs dictionary files that looks as follows. At some point
+-- we will add these files to the distribution.
+--
+-- word-th.lua:
+--
+-- return {
+-- comment = "The data is taken from http://thailinux.gits.net.th/websvn/wsvn/software.swath by Phaisarn Charoenpornsawat and Theppitak Karoonboonyanan.",
+-- copyright = "gnu general public license",
+-- language = "th",
+-- compiling = "mtxrun --script patterns --words --update --compress word-th.lua",
+-- timestamp = "0000-00-00 00:00:00",
+-- version = "1.00",
+-- lists = {
+-- { filename = "tdict-city.txt" },
+-- { filename = "tdict-collection.txt" },
+-- { filename = "tdict-common.txt" },
+-- { filename = "tdict-country.txt" },
+-- { filename = "tdict-district.txt" },
+-- { filename = "tdict-geo.txt" },
+-- { filename = "tdict-history.txt" },
+-- { filename = "tdict-ict.txt" },
+-- { filename = "tdict-lang-ethnic.txt" },
+-- { filename = "tdict-proper.txt" },
+-- { filename = "tdict-science.txt" },
+-- { filename = "tdict-spell.txt" },
+-- { filename = "tdict-std-compound.txt" },
+-- { filename = "tdict-std.txt" },
+-- },
+-- }
+
+-- Currently there is nothing additional special here, first we need a
+-- ConTeXt user who uses it. It's a starting point.
+
+local splitters = scripts.splitters
+
+scripts.installmethod {
+ name = "thai",
+ splitter = splitters.insertafter,
+ initializer = splitters.load,
+ files = {
+ -- "scrp-imp-word-thai.lua",
+ "word-th.lua",
+ },
+ datasets = {
+ default = {
+ inter_word_stretch_factor = 0.25, -- of quad
+ },
+ },
+}
diff --git a/Master/texmf-dist/tex/context/base/sort-ini.lua b/Master/texmf-dist/tex/context/base/sort-ini.lua
index 479d1c48957..9ac020166a8 100644
--- a/Master/texmf-dist/tex/context/base/sort-ini.lua
+++ b/Master/texmf-dist/tex/context/base/sort-ini.lua
@@ -82,7 +82,7 @@ local v_first = variables.first
local v_last = variables.last
local validmethods = table.tohash {
- -- "ch", -- raw character
+ "ch", -- raw character (for tracing)
"mm", -- minus mapping
"zm", -- zero mapping
"pm", -- plus mapping
@@ -120,7 +120,7 @@ local sorters = sorters
local constants = sorters.constants
local data, language, method, digits
-local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence
+local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence, usedinsequence
local thefirstofsplit
local mte = { -- todo: assign to t
@@ -334,6 +334,9 @@ local function setlanguage(l,m,d,u)
end
end
data.sequence = sequence
+ usedinsequence = table.tohash(sequence)
+ data.usedinsequence = usedinsequence
+-- usedinsequence.ch = true -- better just store the string
if trace_tests then
report_sorters("using sort sequence: % t",sequence)
end
@@ -372,7 +375,9 @@ local function basicsort(sort_a,sort_b)
return 0
end
-function comparers.basic(a,b) -- trace ea and eb
+-- todo: compile compare function
+
+local function basic(a,b) -- trace ea and eb
local ea, eb = a.split, b.split
local na, nb = #ea, #eb
if na == 0 and nb == 0 then
@@ -432,6 +437,12 @@ function comparers.basic(a,b) -- trace ea and eb
end
end
+comparers.basic = basic
+
+function sorters.basicsorter(a,b)
+ return basic(a,b) == -1
+end
+
local function numify(s)
s = digitsoffset + tonumber(s) -- alternatively we can create range
if s > digitsmaximum then
@@ -446,7 +457,7 @@ function sorters.strip(str) -- todo: only letters and such
str = gsub(str,"\\[\"\'~^`]*","") -- \"e -- hm, too greedy
str = gsub(str,"\\%S*","") -- the rest
str = gsub(str,"%s","\001") -- can be option
- str = gsub(str,"[%s%[%](){}%$\"\']*","")
+ str = gsub(str,"[%s%[%](){}%$\"\']*","") -- %s already done
if digits == v_numbers then
str = gsub(str,"(%d+)",numify) -- sort numbers properly
end
@@ -477,7 +488,7 @@ sorters.firstofsplit = firstofsplit
-- for the moment we use an inefficient bunch of tables but once
-- we know what combinations make sense we can optimize this
-function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing
+function splitters.utf(str,checked) -- we could append m and u but this is cleaner, s is for tracing
if #replacements > 0 then
-- todo make an lpeg for this
for k=1,#replacements do
@@ -580,18 +591,31 @@ function splitters.utf(str) -- we could append m and u but this is cleaner, s is
-- p_mapping = { p_mappings[fs][1] }
-- end
-- end
- local t = {
- ch = char,
- uc = byte,
- mc = m_case,
- zc = z_case,
- pc = p_case,
- mm = m_mapping,
- zm = z_mapping,
- pm = p_mapping,
- }
- return t
+ if checked then
+ return {
+ ch = trace_tests and char or nil, -- not in sequence
+ uc = usedinsequence.uc and byte or nil,
+ mc = usedinsequence.mc and m_case or nil,
+ zc = usedinsequence.zc and z_case or nil,
+ pc = usedinsequence.pc and p_case or nil,
+ mm = usedinsequence.mm and m_mapping or nil,
+ zm = usedinsequence.zm and z_mapping or nil,
+ pm = usedinsequence.pm and p_mapping or nil,
+ }
+ else
+ return {
+ ch = char,
+ uc = byte,
+ mc = m_case,
+ zc = z_case,
+ pc = p_case,
+ mm = m_mapping,
+ zm = z_mapping,
+ pm = p_mapping,
+ }
+ end
+
end
local function packch(entry)
diff --git a/Master/texmf-dist/tex/context/base/sort-lan.lua b/Master/texmf-dist/tex/context/base/sort-lan.lua
index d2fa276d79a..6b0cc50072e 100644
--- a/Master/texmf-dist/tex/context/base/sort-lan.lua
+++ b/Master/texmf-dist/tex/context/base/sort-lan.lua
@@ -9,6 +9,7 @@ if not modules then modules = { } end modules ['sort-lan'] = {
-- todo: look into uts#10 (2012) ... some experiments ... something
-- to finish in winter.
+-- todo: U+1E9E (german SS)
-- Many vectors were supplied by Wolfgang Schuster and Philipp
-- Gesang. However this is a quite adapted and reformatted variant
@@ -58,7 +59,7 @@ definitions["en"] = { parent = "default" }
definitions['nl'] = {
parent = 'default',
replacements = {
- { "ij", 'y' }, { "IJ", 'Y' },
+ { "ij", 'y' }, { "IJ", 'Y' }, -- hm
},
}
@@ -309,7 +310,7 @@ local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11)
definitions["cz"] = {
replacements = {
- { "ch", ch }, { "CH", CH }
+ { "ch", ch }, { "Ch", ch }, { "CH", ch }
},
entries = {
["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["č"] = "č",
diff --git a/Master/texmf-dist/tex/context/base/spac-ali.lua b/Master/texmf-dist/tex/context/base/spac-ali.lua
index ceb278433f2..08e33c5b87c 100644
--- a/Master/texmf-dist/tex/context/base/spac-ali.lua
+++ b/Master/texmf-dist/tex/context/base/spac-ali.lua
@@ -10,17 +10,28 @@ local div = math.div
local format = string.format
local tasks = nodes.tasks
-local appendaction = tasks.appendaction
-local prependaction = tasks.prependaction
-local disableaction = tasks.disableaction
local enableaction = tasks.enableaction
-local slide_nodes = node.slide
-local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
+local nuts = nodes.nuts
+local nodepool = nuts.pool
-local unsetvalue = attributes.unsetvalue
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
-local concat_nodes = nodes.concat
+local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
+local linked_nodes = nuts.linked
+
+local unsetvalue = attributes.unsetvalue
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -29,14 +40,12 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local line_code = listcodes.line
-local nodepool = nodes.pool
-
local new_stretch = nodepool.stretch
local a_realign = attributes.private("realign")
-local texattribute = tex.attribute
-local texcount = tex.count
+local texsetattribute = tex.setattribute
+local texgetcount = tex.getcount
local isleftpage = layouts.status.isleftpage
@@ -58,10 +67,10 @@ local function handler(head,leftpage,realpageno)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- if current.subtype == line_code then
- local a = current[a_realign]
+ if getsubtype(current) == line_code then
+ local a = getattr(current,a_realign)
if not a or a == 0 then
-- skip
else
@@ -77,12 +86,12 @@ local function handler(head,leftpage,realpageno)
action = leftpage and 2 or 1
end
if action == 1 then
- current.list = hpack_nodes(concat_nodes{current.list,new_stretch(3)},current.width,"exactly")
+ setfield(current,"list",hpack_nodes(linked_nodes(getlist(current),new_stretch(3)),getfield(current,"width"),"exactly"))
if trace_realign then
report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno)
end
elseif action == 2 then
- current.list = hpack_nodes(concat_nodes{new_stretch(3),current.list},current.width,"exactly")
+ setfield(current,"list",hpack_nodes(linked_nodes(new_stretch(3),getlist(current)),getfield(current,"width"),"exactly"))
if trace_realign then
report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno)
end
@@ -92,22 +101,23 @@ local function handler(head,leftpage,realpageno)
done = true
nofrealigned = nofrealigned + 1
end
- current[a_realign] = unsetvalue
+ setattr(current,a_realign,unsetvalue)
end
end
- handler(current.list,leftpage,realpageno)
+ handler(getlist(current),leftpage,realpageno)
elseif id == vlist_code then
- handler(current.list,leftpage,realpageno)
+ handler(getlist(current),leftpage,realpageno)
end
- current = current.next
+ current = getnext(current)
end
return head, done
end
function alignments.handler(head)
local leftpage = isleftpage(true,false)
- local realpageno = texcount.realpageno
- return handler(head,leftpage,realpageno)
+ local realpageno = texgetcount("realpageno")
+ local head, done = handler(tonut(head),leftpage,realpageno)
+ return tonode(head), done
end
local enabled = false
@@ -120,7 +130,7 @@ function alignments.set(n)
report_realign("enabled")
end
end
- texattribute[a_realign] = texcount.realpageno * 10 + n
+ texsetattribute(a_realign,texgetcount("realpageno") * 10 + n)
end
commands.setrealign = alignments.set
diff --git a/Master/texmf-dist/tex/context/base/spac-ali.mkiv b/Master/texmf-dist/tex/context/base/spac-ali.mkiv
index 0f9b211866d..c13e4ca7615 100644
--- a/Master/texmf-dist/tex/context/base/spac-ali.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-ali.mkiv
@@ -20,6 +20,13 @@
%D merged into one and caching has been added, which makes switching
%D twice as fast.
+% Todo: find a way to force last lines to have some distance from the right
+% edge (problem: keywords or presets), maybe a plugin
+%
+% \setupalign[...,myoption] % last
+%
+% but that also means myoption gets frozen due to caching.
+
\registerctxluafile{spac-ali}{1.001}
\definesystemattribute[realign] [public] % might be combined with the next one
@@ -149,6 +156,17 @@
inline:\ifconditional \inlinelefttoright l2r\else r2l\fi\space
]\endgroup}
+\unexpanded\def\righttolefthbox#1#{\normalhbox dir TRT #1\bgroup\righttoleft\let\next} \let\rtlhbox\righttolefthbox
+\unexpanded\def\lefttorighthbox#1#{\normalhbox dir TLT #1\bgroup\lefttoright\let\next} \let\ltrhbox\lefttorighthbox
+\unexpanded\def\righttoleftvbox#1#{\normalvbox dir TRT #1\bgroup\righttoleft\let\next} \let\rtlvbox\righttoleftvbox
+\unexpanded\def\lefttorightvbox#1#{\normalvbox dir TLT #1\bgroup\lefttoright\let\next} \let\ltrvbox\lefttorightvbox
+\unexpanded\def\righttoleftvtop#1#{\normalvtop dir TRT #1\bgroup\righttoleft\let\next} \let\rtlvtop\righttoleftvtop
+\unexpanded\def\lefttorightvtop#1#{\normalvtop dir TLT #1\bgroup\lefttoright\let\next} \let\ltrvtop\lefttorightvtop
+
+\unexpanded\def\autodirhbox#1#{\hbox#1\bgroup\synchronizeinlinedirection\let\next}
+\unexpanded\def\autodirvbox#1#{\vbox#1\bgroup\synchronizeinlinedirection\let\next} % maybe also pardir or maybe just a \vbox
+\unexpanded\def\autodirvtop#1#{\vtop#1\bgroup\synchronizeinlinedirection\let\next} % maybe also pardir or maybe just a \vtop
+
% Tolerance and hyphenation
\ifdefined\lesshyphens \else \let\lesshyphens\relax \fi
@@ -553,13 +571,50 @@
\fi
\raggedcommand}
-% The keywords:
+% experiment
+
+\unexpanded\def\spac_align_use_later#1%
+ {\begingroup
+ \edef\m_spac_align_asked{#1}%
+ \expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align_asked\endcsname
+ \ifx\raggedcommand\relax
+ \spac_align_add_to_cache
+ \fi
+ \endgroup}
+
+\unexpanded\def\spac_align_use_now#1%
+ {\csname\??alignmentnormalcache#1\endcsname}
+
+% Maybe we need something different in columns.
\unexpanded\def\installalign#1#2% beware: commands must be unexpandable!
{\ifcsname\??aligncommand#1\endcsname \else
\setvalue{\??aligncommand#1}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
\fi}
+% beware, toks stuff and states are set at a differt time, so installalign is
+% only for special options
+%
+% \setvalue{\??aligncommand whatever}%
+% {\c_spac_align_state_horizontal\plushundred
+% \t_spac_align_collected\expandafter{\the\t_spac_align_collected .....}}
+%
+% this one could deal with both
+%
+% \unexpanded\def\installalignoption#1#2%
+% {\ifcsname\??aligncommand#1\endcsname \else
+% \setvalue{\??aligncommand#1}%
+% {\spac_align_set_horizontal_none
+% \c_spac_align_state_horizontal\plushundred % don't set
+% \t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
+% \fi}
+%
+% \installalignoption
+% {whatever}
+% {}
+
+% The keywords:
+
\letvalue{\??aligncommand\empty }\empty
\setvalue{\??aligncommand\v!broad }{\c_spac_align_state_broad \plusone }
\setvalue{\??aligncommand\v!wide }{\c_spac_align_state_broad \plustwo }
@@ -959,19 +1014,59 @@
%
% \wordright[\rightskip]{whatever}
+% Trick posted by WS on mailing list, generalized a bit. The bottom text only shows
+% op when there is one line space available. We could add some extra space if needed.
+
+% \unexpanded\def\bottomword
+% {\par
+% \dowithnextbox
+% {\leaders\box\nextbox\vfil\page}
+% \vbox to \lineheight}
+%
+% \unexpanded\def\bottomword
+% {\par
+% \groupedcommand
+% {\leaders
+% \vbox to \lineheight\bgroup}
+% {\egroup
+% \vfil
+% \page}}
+
% \simplealignedbox{2cm}{right}{x}
\installcorenamespace{alignsimple}
-
-\setvalue{\??alignsimple\v!right }#1{{#1\hss}}
-\setvalue{\??alignsimple\v!left }#1{{\hss#1}}
-\setvalue{\??alignsimple\v!flushright}#1{{\hss#1}}
-\setvalue{\??alignsimple\v!flushleft }#1{{#1\hss}}
-\setvalue{\??alignsimple\v!middle }#1{{\hss#1\hss}}
+\installcorenamespace{alignsimplereverse}
+
+% todo: also handle \bgroup ... \egroup
+
+\unexpanded\def\spac_align_simple_left #1{{#1\hss}}
+\unexpanded\def\spac_align_simple_right #1{{\hss#1}}
+\unexpanded\def\spac_align_simple_middle#1{{\hss#1\hss}}
+
+\letvalue{\??alignsimple \v!right }\spac_align_simple_left
+\letvalue{\??alignsimple \v!outer }\spac_align_simple_left % not managed! see linenumbers
+\letvalue{\??alignsimple \v!flushleft }\spac_align_simple_left
+\letvalue{\??alignsimple \v!left }\spac_align_simple_right
+\letvalue{\??alignsimple \v!inner }\spac_align_simple_right % not managed! see linenumbers
+\letvalue{\??alignsimple \v!flushright}\spac_align_simple_right
+\letvalue{\??alignsimple \v!middle }\spac_align_simple_middle
+
+\letvalue{\??alignsimplereverse\v!right }\spac_align_simple_right
+\letvalue{\??alignsimplereverse\v!outer }\spac_align_simple_right % not managed! see linenumbers
+\letvalue{\??alignsimplereverse\v!flushleft }\spac_align_simple_right
+\letvalue{\??alignsimplereverse\v!left }\spac_align_simple_left
+\letvalue{\??alignsimplereverse\v!inner }\spac_align_simple_left % not managed! see linenumbers
+\letvalue{\??alignsimplereverse\v!flushright}\spac_align_simple_left
+\letvalue{\??alignsimplereverse\v!middle }\spac_align_simple_middle
\unexpanded\def\simplealignedbox#1#2%
{\hbox to #1\csname\??alignsimple\ifcsname\??alignsimple#2\endcsname#2\else\v!right\fi\endcsname}
+\newconditional\alignsimplelefttoright \settrue\alignsimplelefttoright
+
+\unexpanded\def\simplereversealignedbox#1#2%
+ {\hbox to #1\csname\??alignsimplereverse\ifcsname\??alignsimplereverse#2\endcsname#2\else\v!left\fi\endcsname}
+
% \installnamespace{alignsets}
%
% \setvalue{\??alignsets\v!right }#1#2{\let#1\relax\let#2\hss }
diff --git a/Master/texmf-dist/tex/context/base/spac-cha.mkiv b/Master/texmf-dist/tex/context/base/spac-cha.mkiv
deleted file mode 100644
index a07c8f198aa..00000000000
--- a/Master/texmf-dist/tex/context/base/spac-cha.mkiv
+++ /dev/null
@@ -1,191 +0,0 @@
-%D \module
-%D [ file=spac-cha, % was supp-ali,
-%D version=2012.06.08, % 2000.04.17,
-%D title=\CONTEXT\ Spacing Macros,
-%D subtitle=Character Alignment,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D Yet undocumented but nevertheless used.
-
-% 0 = centered
-% 1 = left in before
-% 2 = right in before
-% 3 = left in after
-% 4 = right in after
-
-\unprotect
-
-% \starttabulate[|cg{.}|cg{,}|cg{,}|]
-% \NC period \NC comma \NC comma \NC\NR
-% \NG 100.000,00 \NG 100.000,00 \NG 100,00 \NC\NR
-% \NG 10.000,00 \NG 10.000,00 \NG 1000,00 \NC\NR
-% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR
-% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR
-% \NG 10\\ \NG 10\\ \NG 0,00 \NC\NR
-% \NG 10 \NG 10 \NG 0,00 \NC\NR
-% \NG 10 \NG 10 \NG 0,00 \NC\NR
-% \stoptabulate
-
-% We gain not much by luafication and actually make things worse.
-
-\chardef\characteralignmentmode\plusfour
-\chardef\characteralignmentslot\plusone
-
-\let\afterassignwidth \!!zeropoint
-\let\beforeassignwidth\!!zeropoint
-
-\def\alignmentcharacter{.}
-
-\newdimen\d_supp_charalign_width
-\newtoks \t_supp_charalign_list
-
-\let\alignmentclass\s!default % can be used to handle multiple mixed ones
-
-\installcorenamespace{characteralign}
-
-\unexpanded\def\supp_charalign_push
- {\ifcsname\??characteralign\alignmentclass\endcsname\else
- \normalexpanded{\global\t_supp_charalign_list{\the\t_supp_charalign_list\supp_charalign_do{\alignmentclass}}}%
- \fi
- \setxvalue{\??characteralign\alignmentclass}{\supp_charalign_do
- {\afterassignwidth}{\beforeassignwidth}{\alignmentcharacter}}}
-
-\unexpanded\def\supp_charalign_pop_do#1#2#3%
- {\def\afterassignwidth {#1}%
- \def\beforeassignwidth {#2}%
- \def\alignmentcharacter{#3}}
-
-\unexpanded\def\supp_charalign_pop
- {\let\supp_charalign_do\supp_charalign_pop_do
- \executeifdefined{\??characteralign\alignmentclass}\donothing}
-
-\unexpanded\def\supp_charalign_reset_do#1%
- {\global\letbeundefined{\??characteralign#1}} % global !
-
-\unexpanded\def\resetcharacteralign
- {\let\supp_charalign_do\supp_charalign_reset_do
- \the\t_supp_charalign_list
- \global\t_supp_charalign_list\emptytoks}
-
-\unexpanded\def\supp_charalign_firstpass_one#1#2%
- {\supp_charalign_pop
- \let\\\empty
- \setbox\scratchbox\hbox{#1}%
- \d_supp_charalign_width\wd\scratchbox
- \setbox\scratchbox\emptyhbox
- \supp_charalign_check#2#1\relax\relax
- \scratchdimen-\wd\scratchbox
- \setbox\scratchbox\hbox{\ignorespaces#2\unskip}%
- \advance\scratchdimen \wd\scratchbox
- \ifdim\scratchdimen>\beforeassignwidth\relax
- \edef\beforeassignwidth{\the\scratchdimen}%
- \fi
- \ifdim\scratchdimen=\zeropoint
- \setbox\scratchbox\hbox{\ignorespaces#2\unskip}%
- \scratchdimen\wd\scratchbox
- \ifcase\characteralignmentmode
- % do nothing
- \else\ifnum\characteralignmentmode<\plusthree
- \advance\scratchdimen\d_supp_charalign_width\relax
- \ifdim\scratchdimen>\beforeassignwidth\relax
- \edef\beforeassignwidth{\the\scratchdimen}%
- \fi
- \else
- \ifdim\scratchdimen>\afterassignwidth\relax
- \edef\afterassignwidth{\the\scratchdimen}%
- \fi
- \fi\fi
- \fi
- \supp_charalign_push}
-
-\unexpanded\def\supp_charalign_firstpass_two#1#2#3%
- {\ifx#2\relax
- \setbox\scratchbox\hbox{\ignorespaces#1\unskip}%
- \ifdim\wd\scratchbox>\afterassignwidth
- \edef\afterassignwidth{\the\wd\scratchbox}%
- \fi
- \else
- \supp_charalign_check#2#3\relax\relax
- \fi}
-
-\unexpanded\def\supp_charalign_secondpass_one#1#2%
- {\supp_charalign_pop
- \let\\\empty % beware, no grouping
- \setbox\scratchbox\hbox{#1}%
- \d_supp_charalign_width\wd\scratchbox
- \setbox\scratchbox\emptyhbox
- % new 12,34 vs 10\\ where 10 aligns on 12 if #1 = ,
- \ifcase\characteralignmentslot
- \supp_charalign_check#2#1\relax\relax
- \scratchdimen\wd\scratchbox
- \setbox\scratchbox\hbox{\ignorespaces##1\unskip}%
- \else
- \def\\{#1}%
- \normalexpanded{\supp_charalign_check#2#1\relax\relax}%
- \scratchdimen\wd\scratchbox
- \setbox\scratchbox\hbox{\def\\{\hphantom{#1}}\ignorespaces#2\unskip}%
- \fi
- \noindent
- \ifdim\scratchdimen=\wd\scratchbox
- \ifcase\characteralignmentmode
- \box\scratchbox
- \else
- \hbox
- {\dontcomplain
- \hbox to \beforeassignwidth
- {\ifcase\characteralignmentmode\or
- \box\scratchbox\hss
- \or
- \hss\box\scratchbox\hskip\d_supp_charalign_width
- \or
- \hss\rlap{\box\scratchbox}%
- \or
- \hss\rlap{\hbox to \afterassignwidth{\hss\box\scratchbox}}%
- \fi}%
- \hskip\afterassignwidth}%
- \fi
- \else
- \hbox
- {\hbox to \beforeassignwidth
- {\hss\box\scratchbox\hskip-\scratchdimen}%
- \hskip\afterassignwidth}%
- \fi}
-
-\unexpanded\def\supp_charalign_secondpass_two#1#2#3%
- {\ifx#2\relax
- \setbox\scratchbox\hbox{\ignorespaces#1\unskip}%
- \else
- \supp_charalign_check#2#3\relax\relax
- \fi}
-
-\unexpanded\def\supp_charalign_firstpass#1%
- {\unexpanded\def\checkalignment ##1{\supp_charalign_firstpass_one{#1}{##1}}%
- \unexpanded\def\supp_charalign_check##1#1##2##3\relax{\supp_charalign_firstpass_two{##1}{##2}{##3}}}
-
-\unexpanded\def\supp_charalign_secondpass#1%
- {\unexpanded\def\checkalignment ##1{\supp_charalign_secondpass_one{#1}{##1}}%
- \unexpanded\def\supp_charalign_check##1#1##2##3\relax{\supp_charalign_secondpass_two{##1}{##2}{##3}}}
-
-\unexpanded\def\setfirstpasscharacteralign
- {\supp_charalign_pop
- \normalexpanded{\supp_charalign_firstpass{\alignmentcharacter}}}
-
-\unexpanded\def\setsecondpasscharacteralign
- {\supp_charalign_pop
- \normalexpanded{\supp_charalign_secondpass{\alignmentcharacter}}}
-
-\unexpanded\def\startcharacteralign#1\stopcharacteralign
- {\bgroup
- \setfirstpasscharacteralign #1%
- \setsecondpasscharacteralign#1%
- \egroup}
-
-\let\stopcharacteralign\relax
-
-\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/spac-chr.lua b/Master/texmf-dist/tex/context/base/spac-chr.lua
index 24364978a3b..1abba350ad8 100644
--- a/Master/texmf-dist/tex/context/base/spac-chr.lua
+++ b/Master/texmf-dist/tex/context/base/spac-chr.lua
@@ -14,19 +14,39 @@ local byte, lower = string.byte, string.lower
-- to be redone: characters will become tagged spaces instead as then we keep track of
-- spaceskip etc
+-- todo: only setattr when export
+
+local next = next
+
trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end)
report_characters = logs.reporter("typesetting","characters")
local nodes, node = nodes, node
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove -- ! nodes
-local copy_node_list = node.copy_list
+local nuts = nodes.nuts
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local copy_node_list = nuts.copy_list
+local traverse_id = nuts.traverse_id
-local nodepool = nodes.pool
local tasks = nodes.tasks
+local nodepool = nuts.pool
local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
@@ -41,7 +61,9 @@ local chardata = characters.data
local typesetters = typesetters
-local characters = { }
+local unicodeblocks = characters.blocks
+
+local characters = typesetters.characters or { } -- can be predefined
typesetters.characters = characters
local fonthashes = fonts.hashes
@@ -49,6 +71,8 @@ local fontparameters = fonthashes.parameters
local fontcharacters = fonthashes.characters
local fontquads = fonthashes.quads
+local setmetatableindex = table.setmetatableindex
+
local a_character = attributes.private("characters")
local a_alignstate = attributes.private("alignstate")
@@ -56,60 +80,106 @@ local c_zero = byte('0')
local c_period = byte('.')
local function inject_quad_space(unicode,head,current,fraction)
- local attr = current.attr
+ local attr = getfield(current,"attr")
if fraction ~= 0 then
- fraction = fraction * fontquads[current.font]
+ fraction = fraction * fontquads[getfont(current)]
end
local glue = new_glue(fraction)
--- glue.attr = copy_node_list(attr)
- glue.attr = attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_char_space(unicode,head,current,parent)
- local attr = current.attr
- local font = current.font
+ local attr = getfield(current,"attr")
+ local font = getfont(current)
local char = fontcharacters[font][parent]
local glue = new_glue(char and char.width or fontparameters[font].space)
- -- glue.attr = copy_node_list(current.attr)
- glue.attr = current.attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_nobreak_space(unicode,head,current,space,spacestretch,spaceshrink)
- local attr = current.attr
+ local attr = getfield(current,"attr")
local glue = new_glue(space,spacestretch,spaceshrink)
local penalty = new_penalty(10000)
- -- glue.attr = copy_node_list(attr)
- glue.attr = attr
- current.attr = nil
- -- penalty.attr = attr
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode)
head, current = insert_node_after(head,current,penalty)
head, current = insert_node_after(head,current,glue)
return head, current
end
+local function nbsp(head,current)
+ local para = fontparameters[getfont(current)]
+ if getattr(current,a_alignstate) == 1 then -- flushright
+ head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0)
+ setfield(current,"subtype",space_skip_code)
+ else
+ head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink)
+ end
+ return head, current
+end
+
+-- assumes nuts or nodes, depending on callers .. so no tonuts here
+
+function characters.replacenbsp(head,original)
+ local head, current = nbsp(head,original)
+ head = remove_node(head,original,true)
+ return head, current
+end
+
+function characters.replacenbspaces(head)
+ for current in traverse_id(glyph_code,head) do
+ if getchar(current) == 0x00A0 then
+ local h = nbsp(head,current)
+ if h then
+ head = remove_node(h,current,true)
+ end
+ end
+ end
+ return head
+end
+
+-- This initialization might move someplace else if we need more of it. The problem is that
+-- this module depends on fonts so we have an order problem.
+
+local nbsphash = { } setmetatableindex(nbsphash,function(t,k)
+ for i=unicodeblocks.devanagari.first,unicodeblocks.devanagari.last do nbsphash[i] = true end
+ for i=unicodeblocks.kannada .first,unicodeblocks.kannada .last do nbsphash[i] = true end
+ setmetatableindex(nbsphash,nil)
+ return nbsphash[k]
+end)
+
local methods = {
-- The next one uses an attribute assigned to the character but still we
-- don't have the 'local' value.
[0x00A0] = function(head,current) -- nbsp
- local para = fontparameters[current.font]
- if current[a_alignstate] == 1 then -- flushright
- head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0)
- current.subtype = space_skip_code
- else
- head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink)
+ local next = getnext(current)
+ if next and getid(next) == glyph_code then
+ local char = getchar(next)
+ if char == 0x200C or char == 0x200D then -- nzwj zwj
+ next = getnext(next)
+ if next and nbsphash[getchar(next)] then
+ return false
+ end
+ elseif nbsphash[char] then
+ return false
+ end
+ end
+ local prev = getprev(current)
+ if prev and getid(prev) == glyph_code and nbsphash[getchar(prev)] then
+ return false
end
- return head, current
+ return nbsp(head,current)
end,
[0x2000] = function(head,current) -- enquad
@@ -161,11 +231,11 @@ local methods = {
end,
[0x202F] = function(head,current) -- narrownobreakspace
- return inject_nobreak_space(0x202F,head,current,fontquads[current.font]/8)
+ return inject_nobreak_space(0x202F,head,current,fontquads[getfont(current)]/8)
end,
[0x205F] = function(head,current) -- math thinspace
- return inject_nobreak_space(0x205F,head,current,fontparameters[current.font].space/8)
+ return inject_nobreak_space(0x205F,head,current,fontparameters[getfont(current)].space/8)
end,
-- [0xFEFF] = function(head,current) -- zerowidthnobreakspace
@@ -174,27 +244,30 @@ local methods = {
}
-function characters.handler(head)
+function characters.handler(head) -- todo: use traverse_id
+ head = tonut(head)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local next = current.next
- local char = current.char
+ local next = getnext(current)
+ local char = getchar(current)
local method = methods[char]
if method then
if trace_characters then
report_characters("replacing character %C, description %a",char,lower(chardata[char].description))
end
- head = method(head,current)
- head = remove_node(head,current,true)
+ local h = method(head,current)
+ if h then
+ head = remove_node(h,current,true)
+ end
done = true
end
current = next
else
- current = current.next
+ current = getnext(current)
end
end
- return head, done
+ return tonode(head), done
end
diff --git a/Master/texmf-dist/tex/context/base/spac-chr.mkiv b/Master/texmf-dist/tex/context/base/spac-chr.mkiv
index 0b6ebe0a9ff..54a25be34a8 100644
--- a/Master/texmf-dist/tex/context/base/spac-chr.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-chr.mkiv
@@ -68,13 +68,16 @@
\edef\breakablethinspace {\normalUchar"2009} % quad/8
\edef\hairspace {\normalUchar"200A} % quad/8
\edef\zerowidthspace {\normalUchar"200B} % 0
-\edef\zwnj {\normalUchar"200C} % 0
-\edef\zwj {\normalUchar"200D} % 0
+\edef\zerowidthnonjoiner {\normalUchar"200C} % 0
+\edef\zerowidthjoiner {\normalUchar"200D} % 0
\edef\narrownobreakspace {\normalUchar"202F} % quad/8
% % "205F % space/8 (math)
% \zerowidthnobreakspace {\normalUchar"FEFF}
\udef\zerowidthnobreakspace {\penalty\plustenthousand\kern\zeropoint}
+\let\zwnj\zerowidthnonjoiner
+\let\zwj \zerowidthjoiner
+
% Shortcuts:
% unexpanded as otherwise we need to intercept / cleanup a lot
diff --git a/Master/texmf-dist/tex/context/base/spac-hor.lua b/Master/texmf-dist/tex/context/base/spac-hor.lua
index 09920bd4647..c9d6e2b154b 100644
--- a/Master/texmf-dist/tex/context/base/spac-hor.lua
+++ b/Master/texmf-dist/tex/context/base/spac-hor.lua
@@ -8,6 +8,10 @@ if not modules then modules = { } end modules ['spac-hor'] = {
local match = string.match
local utfbyte = utf.byte
+
+local context = context
+local commands = commands
+
local chardata = characters.data
local can_have_space = table.tohash {
diff --git a/Master/texmf-dist/tex/context/base/spac-hor.mkiv b/Master/texmf-dist/tex/context/base/spac-hor.mkiv
index 2ce502fb5f5..92491ce3218 100644
--- a/Master/texmf-dist/tex/context/base/spac-hor.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-hor.mkiv
@@ -32,7 +32,7 @@
{\doifoutervmode{\ifconditional\c_spac_indentation_indent_first\else\spac_indentation_variant_no\fi}}
\unexpanded\def\setupindenting
- {\doifnextoptionalelse\spac_indentation_setup_options\spac_indentation_setup_size}
+ {\doifnextoptionalcselse\spac_indentation_setup_options\spac_indentation_setup_size}
\unexpanded\def\spac_indentation_setup_size
{\assigndimension\v_spac_indentation_current\d_spac_indentation_par{1\emwidth}{1.5\emwidth}{2\emwidth}}
@@ -64,24 +64,65 @@
\def\spac_indentation_set_everypar
{\everypar{\checkindentation}}
+% \def\spac_indentation_apply_step_one#1%
+% {\ifcsname\??indentingmethod#1\endcsname
+% % case two
+% \else
+% \edef\v_spac_indentation_current{#1}% single entry in list
+% \let\normalindentation\v_spac_indentation_current
+% \spac_indentation_setup_size
+% \fi}
+%
+% \def\spac_indentation_apply_step_two#1%
+% {\ifcsname\??indentingmethod#1\endcsname
+% \csname\??indentingmethod#1\endcsname
+% \else
+% % case one
+% \fi}
+
+% \defineindenting[whatever][yes,2cm]
+% %defineindenting[whatever][yes,-2cm]
+%
+% \setupindenting[yes,-2em] \input ward \par
+% \setupindenting[yes,2em] \input ward \par
+% \setupindenting[whatever] \input ward \par
+
+\installcorenamespace {indentingpreset}
+
+\unexpanded\def\defineindenting
+ {\dodoubleargument\spac_indenting_define}
+
+\def\spac_indenting_define[#1][#2]% todo: mixes
+ {\setevalue{\??indentingpreset#1}{#2}}
+
+\def\spac_indentation_apply_step_one_nested#1%
+ {\expandafter\processcommacommand\expandafter[\csname\??indentingpreset#1\endcsname]\spac_indentation_apply_step_one}
+
+\def\spac_indentation_apply_step_two_nested#1%
+ {\expandafter\processcommacommand\expandafter[\csname\??indentingpreset#1\endcsname]\spac_indentation_apply_step_two}
+
\def\spac_indentation_apply_step_one#1%
- {\ifcsname\??indentingmethod#1\endcsname
+ {\ifcsname\??indentingpreset#1\endcsname
+ \spac_indentation_apply_step_one_nested{#1}%
+ \else\ifcsname\??indentingmethod#1\endcsname
% case two
\else
\edef\v_spac_indentation_current{#1}% single entry in list
\let\normalindentation\v_spac_indentation_current
\spac_indentation_setup_size
- \fi}
+ \fi\fi}
\def\spac_indentation_apply_step_two#1%
- {\ifcsname\??indentingmethod#1\endcsname
+ {\ifcsname\??indentingpreset#1\endcsname
+ \spac_indentation_apply_step_two_nested{#1}%
+ \else\ifcsname\??indentingmethod#1\endcsname
\csname\??indentingmethod#1\endcsname
\else
% case one
- \fi}
+ \fi\fi}
\unexpanded\def\indenting % kind of obsolete
- {\doifnextoptionalelse\spac_indentation_setup_options\relax}
+ {\doifnextoptionalcselse\spac_indentation_setup_options\relax}
% use \noindentation to suppress next indentation
@@ -339,7 +380,7 @@
\installspacingmethod \v!broad {\nonfrenchspacing} % more depending on what punctuation
\unexpanded\def\setupspacing
- {\doifnextoptionalelse\spac_spacecodes_setup_yes\spac_spacecodes_setup_nop}
+ {\doifnextoptionalcselse\spac_spacecodes_setup_yes\spac_spacecodes_setup_nop}
\def\spac_spacecodes_setup_yes[#1]%
{\csname\??spacecodemethod#1\endcsname
@@ -429,7 +470,8 @@
\hskip\wd\scratchbox\relax}
\unexpanded\def\fixedspaces
- {\letcatcodecommand \ctxcatcodes \tildeasciicode \fixedspace}
+ {\letcatcodecommand \ctxcatcodes \tildeasciicode\fixedspace
+ \let~\fixedspace} % we need to renew it
\appendtoks
\let~\space
@@ -486,17 +528,17 @@
\ifdefined\thinspace \else
- \def\thinspace {\kern .16667\emwidth}
- \def\negthinspace{\kern-.16667\emwidth}
- \def\enspace {\kern .5\emwidth}
+ \unexpanded\def\thinspace {\kern .16667\emwidth}
+ \unexpanded\def\negthinspace{\kern-.16667\emwidth}
+ \unexpanded\def\enspace {\kern .5\emwidth}
\fi
\ifdefined\quad \else
- \def\enskip{\hskip.5\emwidth}
- \def\quad {\hskip \emwidth}
- \def\qquad {\hskip 2\emwidth}
+ \unexpanded\def\enskip{\hskip.5\emwidth}
+ \unexpanded\def\quad {\hskip \emwidth}
+ \unexpanded\def\qquad {\hskip 2\emwidth}
\fi
@@ -613,14 +655,51 @@
{\begingroup
\dodoubleempty\spac_narrower_start_named}
-\def\spac_narrower_start_named[#1][#2]%
- {\edef\currentnarrower{#1}%
- \ifsecondargument
- \spac_narrower_start_apply{#2}%
+% \def\spac_narrower_start_named[#1][#2]%
+% {\edef\currentnarrower{#1}%
+% \ifsecondargument
+% \spac_narrower_start_apply{#2}%
+% \else
+% \spac_narrower_start_apply{\narrowerparameter\v!default}%
+% \fi}
+
+\def\spac_narrower_start_named
+ {\ifsecondargument
+ \expandafter\spac_narrower_start_named_two
\else
- \spac_narrower_start_apply{\narrowerparameter\v!default}%
+ \expandafter\spac_narrower_start_named_one
\fi}
+\def\spac_narrower_start_named_one[#1]%
+ {\doifassignmentelse{#1}\spac_narrower_start_named_one_yes\spac_narrower_start_named_one_nop[#1]}
+
+\def\spac_narrower_start_named_one_yes[#1][#2]% [settings] []
+ {\setupcurrentnarrower[#1]%
+ \spac_narrower_start_apply{\narrowerparameter\v!default}}
+
+\def\spac_narrower_start_named_one_nop[#1][#2]% [tag] []
+ {\edef\currentnarrower{#1}%
+ \spac_narrower_start_apply{\narrowerparameter\v!default}}
+
+\def\spac_narrower_start_named_two[#1]%
+ {\doifassignmentelse{#1}\spac_narrower_start_named_settings_how\spac_narrower_start_named_tag_unknown[#1]}
+
+\def\spac_narrower_start_named_settings_how[#1][#2]% [settings] [how]
+ {\setupcurrentnarrower[#1]%
+ \spac_narrower_start_apply{#2}}
+
+\def\spac_narrower_start_named_tag_unknown[#1][#2]% [tag] [...]
+ {\doifassignmentelse{#2}\spac_narrower_start_named_tag_settings\spac_narrower_start_named_tag_how[#1][#2]}
+
+\def\spac_narrower_start_named_tag_settings[#1][#2]% [tag] [settings]
+ {\edef\currentnarrower{#1}%
+ \setupcurrentnarrower[#2]%
+ \spac_narrower_start_apply{\narrowerparameter\v!default}}
+
+\def\spac_narrower_start_named_tag_how[#1][#2]% [tag] [how]
+ {\edef\currentnarrower{#1}%
+ \spac_narrower_start_apply{#2}}
+
\let\stopnarrow\spac_narrower_stop
\newdimen\d_spac_effective_hsize \def\effectivehsize {\hsize}
@@ -1021,7 +1100,7 @@
%D A rather unknown one:
\unexpanded\def\widened % moved from cont-new
- {\doifnextoptionalelse\spac_widened_yes\spac_widened_nop}
+ {\doifnextoptionalcselse\spac_widened_yes\spac_widened_nop}
\def\spac_widened_yes[#1]#2{\hbox \s!spread #1{\hss#2\hss}}
\def\spac_widened_nop #1{\hbox \s!spread \emwidth{\hss#1\hss}}
diff --git a/Master/texmf-dist/tex/context/base/spac-par.mkiv b/Master/texmf-dist/tex/context/base/spac-par.mkiv
index b6116eb018f..825cdca462b 100644
--- a/Master/texmf-dist/tex/context/base/spac-par.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-par.mkiv
@@ -185,23 +185,50 @@
\let\flushpostponednodedata\relax % hook into everypar
-\newbox\b_spac_postponed_data
+\newbox \b_spac_postponed_data
+%newcount\c_spac_postponed_data
+
+% \installcorenamespace {postponednodesstack}
+%
+% \initializeboxstack\??postponednodesstack
+%
+% \unexpanded\def\pushpostponednodedata
+% {\global\advance\c_spac_postponed_data\plusone
+% \savebox\??postponednodesstack{\the\c_spac_postponed_data}{\box\b_spac_postponed_data}}
+%
+% \unexpanded\def\poppostponednodedata
+% {\global\setbox\b_spac_postponed_data\hbox{\foundbox\??postponednodesstack{\the\c_spac_postponed_data}}%
+% \global\advance\c_spac_postponed_data\minusone
+% \ifvoid\b_spac_postponed_data\else
+% \glet\flushpostponednodedata\spac_postponed_data_flush
+% \fi}
+
+\unexpanded\def\pushpostponednodedata
+ {\globalpushbox\b_spac_postponed_data}
+
+\unexpanded\def\poppostponednodedata
+ {\globalpopbox\b_spac_postponed_data
+ \ifvoid\b_spac_postponed_data\else
+ \glet\flushpostponednodedata\spac_postponed_data_flush
+ \fi}
\unexpanded\def\flushatnextpar
{\begingroup
- \glet\flushpostponednodedata\spac_postponed_flush
- \dowithnextboxcs\spac_postponed_finish\hbox}
+ \glet\flushpostponednodedata\spac_postponed_data_flush
+ \dowithnextboxcs\spac_postponed_data_finish\hbox}
-\def\spac_postponed_finish
+\def\spac_postponed_data_finish
{\global\setbox\b_spac_postponed_data\hbox % to\zeropoint
{\box\b_spac_postponed_data\box\nextbox}%
\endgroup}
-\def\spac_postponed_flush
- {\ifvoid\b_spac_postponed_data\else
- \hbox{\smashedbox\b_spac_postponed_data}% \box\b_spac_postponed_data
- \fi
- \glet\flushpostponednodedata\relax}
+\def\spac_postponed_data_flush
+ {%\iftrialtypesetting \else
+ \ifvoid\b_spac_postponed_data\else
+ \hbox{\smashedbox\b_spac_postponed_data}% \box\b_spac_postponed_data
+ \fi
+ \glet\flushpostponednodedata\relax
+ }%\fi}
\unexpanded\def\doflushatpar
{\ifvmode
diff --git a/Master/texmf-dist/tex/context/base/spac-ver.lua b/Master/texmf-dist/tex/context/base/spac-ver.lua
index 7d030ab1ac2..3f1fd5c82a4 100644
--- a/Master/texmf-dist/tex/context/base/spac-ver.lua
+++ b/Master/texmf-dist/tex/context/base/spac-ver.lua
@@ -8,7 +8,8 @@ if not modules then modules = { } end modules ['spac-ver'] = {
-- we also need to call the spacer for inserts!
--- todo: directly set skips
+-- todo: use lua nodes with lua data (>0.79)
+-- see ** can go when 0.79
-- this code dates from the beginning and is kind of experimental; it
-- will be optimized and improved soon
@@ -23,8 +24,7 @@ if not modules then modules = { } end modules ['spac-ver'] = {
local next, type, tonumber = next, type, tonumber
local gmatch, concat = string.gmatch, table.concat
-local ceil, floor, max, min, round, abs = math.ceil, math.floor, math.max, math.min, math.round, math.abs
-local texlists, texdimen, texbox = tex.lists, tex.dimen, tex.box
+local ceil, floor = math.ceil, math.floor
local lpegmatch = lpeg.match
local unpack = unpack or table.unpack
local allocate = utilities.storage.allocate
@@ -33,12 +33,13 @@ local formatters = string.formatters
local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc
-local nodes, node, trackers, attributes, context = nodes, node, trackers, attributes, context
+local nodes, node, trackers, attributes, context, commands, tex = nodes, node, trackers, attributes, context, commands, tex
-local variables = interfaces.variables
+local texlists = tex.lists
+local texgetdimen = tex.getdimen
+local texnest = tex.nest
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
+local variables = interfaces.variables
-- vertical space handler
@@ -62,23 +63,41 @@ local a_skiporder = attributes.private('skiporder')
local a_snapmethod = attributes.private('snapmethod')
local a_snapvbox = attributes.private('snapvbox')
-local find_node_tail = node.tail
-local free_node = node.free
-local free_node_list = node.flush_list
-local copy_node = node.copy
-local traverse_nodes = node.traverse
-local traverse_nodes_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local count_nodes = nodes.count
-local nodeidstostring = nodes.idstostring
-local hpack_node = node.hpack
-local vpack_node = node.vpack
-local writable_spec = nodes.writable_spec
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+local ntostring = nuts.tostring
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+
+local find_node_tail = nuts.tail
+local free_node = nuts.free
+local free_node_list = nuts.flush_list
+local copy_node = nuts.copy
+local traverse_nodes = nuts.traverse
+local traverse_nodes_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local count_nodes = nuts.count
+local hpack_node = nuts.hpack
+local vpack_node = nuts.vpack
+local writable_spec = nuts.writable_spec
+local nodereference = nuts.reference
+
local listtoutf = nodes.listtoutf
+local nodeidstostring = nodes.idstostring
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_penalty = nodepool.penalty
local new_kern = nodepool.kern
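The spac-ver.lua hunks in this part of the patch move from the userdata node accessors to the nuts (direct node) layer. A minimal sketch of the access pattern these locals imply — the handler name and its purpose are made up, only the nuts calls (tonut, tonode, getid, nuts.traverse) and the nodecodes lookup come from the locals above:

    local nuts      = nodes.nuts
    local tonut     = nuts.tonut
    local tonode    = nuts.tonode
    local getid     = nuts.getid
    local traverse  = nuts.traverse
    local glue_code = nodes.nodecodes.glue

    -- hypothetical handler: convert once at the boundary, stay with nuts inside
    local function count_glue(head)
        local h = tonut(head)        -- userdata node -> direct node (nut)
        local n = 0
        for current in traverse(h) do
            if getid(current) == glue_code then
                n = n + 1
            end
        end
        return tonode(h), n          -- convert back before handing the list to TeX
    end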
@@ -87,7 +106,7 @@ local new_gluespec = nodepool.gluespec
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
-local fillcodes = nodes.fillcodes
+local whatsitcodes = nodes.whatsitcodes
local penalty_code = nodecodes.penalty
local kern_code = nodecodes.kern
@@ -96,16 +115,14 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local whatsit_code = nodecodes.whatsit
-local userskip_code = skipcodes.userskip
-
local vspacing = builders.vspacing or { }
builders.vspacing = vspacing
local vspacingdata = vspacing.data or { }
vspacing.data = vspacingdata
-vspacingdata.snapmethods = vspacingdata.snapmethods or { }
-local snapmethods = vspacingdata.snapmethods --maybe some older code can go
+local snapmethods = vspacingdata.snapmethods or { }
+vspacingdata.snapmethods = snapmethods
storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods")
@@ -180,28 +197,26 @@ end
-- local rule_id = nodecodes.rule
-- local vlist_id = nodecodes.vlist
-- function nodes.makevtop(n)
--- if n.id == vlist_id then
--- local list = n.list
--- local height = (list and list.id <= rule_id and list.height) or 0
--- n.depth = n.depth - height + n.height
--- n.height = height
+-- if getid(n) == vlist_id then
+-- local list = getlist(n)
+-- local height = (list and getid(list) <= rule_id and getfield(list,"height")) or 0
+--         setfield(n,"depth",getfield(n,"depth") - height + getfield(n,"height"))
+--         setfield(n,"height",height)
-- end
-- end
-local reference = nodes.reference
-
local function validvbox(parentid,list)
if parentid == hlist_code then
- local id = list.id
+ local id = getid(list)
if id == whatsit_code then -- check for initial par subtype
- list = list.next
+ list = getnext(list)
if not next then
return nil
end
end
local done = nil
for n in traverse_nodes(list) do
- local id = n.id
+ local id = getid(n)
if id == vlist_code or id == hlist_code then
if done then
return nil
@@ -215,9 +230,9 @@ local function validvbox(parentid,list)
end
end
if done then
- local id = done.id
+ local id = getid(done)
if id == hlist_code then
- return validvbox(id,done.list)
+ return validvbox(id,getlist(done))
end
end
return done -- only one vbox
@@ -227,19 +242,19 @@ end
local function already_done(parentid,list,a_snapmethod) -- todo: done when only boxes and all snapped
-- problem: any snapped vbox ends up in a line
if list and parentid == hlist_code then
- local id = list.id
+ local id = getid(list)
if id == whatsit_code then -- check for initial par subtype
- list = list.next
+ list = getnext(list)
if not next then
return false
end
end
--~ local i = 0
for n in traverse_nodes(list) do
- local id = n.id
---~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod])
+ local id = getid(n)
+--~ i = i + 1 print(i,nodecodes[id],getattr(n,a_snapmethod))
if id == hlist_code or id == vlist_code then
- local a = n[a_snapmethod]
+ local a = getattr(n,a_snapmethod)
if not a then
-- return true -- not snapped at all
elseif a == 0 then
@@ -277,31 +292,35 @@ end
-- check variables.none etc
local function snap_hlist(where,current,method,height,depth) -- method.strut is default
- local list = current.list
+ local list = getlist(current)
local t = trace_vsnapping and { }
if t then
- t[#t+1] = formatters["list content: %s"](nodes.toutf(list))
- t[#t+1] = formatters["parent id: %s"](reference(current))
+ t[#t+1] = formatters["list content: %s"](listtoutf(list))
+ t[#t+1] = formatters["parent id: %s"](nodereference(current))
t[#t+1] = formatters["snap method: %s"](method.name)
t[#t+1] = formatters["specification: %s"](method.specification)
end
local snapht, snapdp
if method["local"] then
-- snapping is done immediately here
- snapht, snapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth
+ snapht = texgetdimen("bodyfontstrutheight")
+ snapdp = texgetdimen("bodyfontstrutdepth")
if t then
t[#t+1] = formatters["local: snapht %p snapdp %p"](snapht,snapdp)
end
elseif method["global"] then
- snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
+ snapht = texgetdimen("globalbodyfontstrutheight")
+ snapdp = texgetdimen("globalbodyfontstrutdepth")
if t then
t[#t+1] = formatters["global: snapht %p snapdp %p"](snapht,snapdp)
end
else
-- maybe autolocal
-- snapping might happen later in the otr
- snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
- local lsnapht, lsnapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth
+ snapht = texgetdimen("globalbodyfontstrutheight")
+ snapdp = texgetdimen("globalbodyfontstrutdepth")
+ local lsnapht = texgetdimen("bodyfontstrutheight")
+ local lsnapdp = texgetdimen("bodyfontstrutdepth")
if snapht ~= lsnapht and snapdp ~= lsnapdp then
snapht, snapdp = lsnapht, lsnapdp
end
@@ -309,7 +328,8 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp)
end
end
- local h, d = height or current.height, depth or current.depth
+ local h = height or getfield(current,"height")
+ local d = depth or getfield(current,"depth")
local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d
local tlines, blines = method.tlines or 1, method.blines or 1
local done, plusht, plusdp = false, snapht, snapdp
@@ -336,19 +356,22 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if method.first then
local thebox = current
- local id = thebox.id
+ local id = getid(thebox)
if id == hlist_code then
- thebox = validvbox(id,thebox.list)
- id = thebox and thebox.id
+ thebox = validvbox(id,getlist(thebox))
+ id = thebox and getid(thebox)
end
if thebox and id == vlist_code then
- local list, lh, ld = thebox.list
+ local list = getlist(thebox)
+ local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh, ld = n.height, n.depth
+ lh = getfield(n,"height")
+ ld = getfield(n,"depth")
break
end
if lh then
- local ht, dp = thebox.height, thebox.depth
+ local ht = getfield(thebox,"height")
+ local dp = getfield(thebox,"depth")
if t then
t[#t+1] = formatters["first line: height %p depth %p"](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -356,9 +379,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = h - lh
ch, cd = lh, delta + d
h, d = ch, cd
- local shifted = hpack_node(current.list)
- shifted.shift = delta
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",delta)
+ setfield(current,"list",shifted)
done = true
if t then
t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta)
@@ -371,18 +394,21 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
end
elseif method.last then
local thebox = current
- local id = thebox.id
+ local id = getid(thebox)
if id == hlist_code then
- thebox = validvbox(id,thebox.list)
- id = thebox and thebox.id
+ thebox = validvbox(id,getlist(thebox))
+ id = thebox and getid(thebox)
end
if thebox and id == vlist_code then
- local list, lh, ld = thebox.list
+ local list = getlist(thebox)
+ local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh, ld = n.height, n.depth
+ lh = getfield(n,"height")
+ ld = getfield(n,"depth")
end
if lh then
- local ht, dp = thebox.height, thebox.depth
+ local ht = getfield(thebox,"height")
+ local dp = getfield(thebox,"depth")
if t then
t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -390,9 +416,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = d - ld
cd, ch = ld, delta + h
h, d = ch, cd
- local shifted = hpack_node(current.list)
- shifted.shift = delta
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",delta)
+ setfield(current,"list",shifted)
done = true
if t then
t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta)
@@ -453,25 +479,25 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if offset then
-- we need to set the attr
if t then
- t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
+ t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
end
- local shifted = hpack_node(current.list)
- shifted.shift = offset
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",offset)
+ setfield(current,"list",shifted)
if t then
- t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
+ t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
end
- shifted[a_snapmethod] = 0
- current[a_snapmethod] = 0
+ setattr(shifted,a_snapmethod,0)
+ setattr(current,a_snapmethod,0)
end
if not height then
- current.height = ch
+ setfield(current,"height",ch)
if t then
t[#t+1] = formatters["forced height: %p"](ch)
end
end
if not depth then
- current.depth = cd
+ setfield(current,"depth",cd)
if t then
t[#t+1] = formatters["forced depth: %p"](cd)
end
@@ -479,23 +505,24 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local lines = (ch+cd)/snaphtdp
if t then
local original = (h+d)/snaphtdp
- local whatever = (ch+cd)/(texdimen.globalbodyfontstrutheight + texdimen.globalbodyfontstrutdepth)
+ local whatever = (ch+cd)/(texgetdimen("globalbodyfontstrutheight") + texgetdimen("globalbodyfontstrutdepth"))
t[#t+1] = formatters["final lines: %s -> %s (%s)"](original,lines,whatever)
t[#t+1] = formatters["final height: %p -> %p"](h,ch)
t[#t+1] = formatters["final depth: %p -> %p"](d,cd)
end
if t then
- report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t)
+ report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[getid(current)],t)
end
return h, d, ch, cd, lines
end
local function snap_topskip(current,method)
- local spec = current.spec
- local w = spec.width
+ local spec = getfield(current,"spec")
+ local w = getfield(spec,"width")
local wd = w
- if spec.writable then
- spec.width, wd = 0, 0
+ if getfield(spec,"writable") then
+ setfield(spec,"width",0)
+ wd = 0
end
return w, wd
end
@@ -509,14 +536,15 @@ local categories = allocate {
[5] = 'disable',
[6] = 'nowhite',
[7] = 'goback',
- [8] = 'together'
+ [8] = 'together', -- not used (?)
+ [9] = 'overlay',
}
vspacing.categories = categories
function vspacing.tocategories(str)
local t = { }
- for s in gmatch(str,"[^, ]") do
+ for s in gmatch(str,"[^, ]") do -- use lpeg instead
local n = tonumber(s)
if n then
t[categories[n]] = true
@@ -527,7 +555,7 @@ function vspacing.tocategories(str)
return t
end
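A quick usage sketch of the helper above — note that the gmatch pattern picks up single characters, so categories are given as single digits; tocategory (just below) feeds the resulting table through set.tonumber:

    local t = vspacing.tocategories("2,7")  -- { force = true, goback = true }
    local n = vspacing.tocategory("2,7")    -- set.tonumber { force = true, goback = true }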
-function vspacing.tocategory(str)
+function vspacing.tocategory(str) -- can be optimized
if type(str) == "string" then
return set.tonumber(vspacing.tocategories(str))
else
@@ -558,15 +586,15 @@ do -- todo: interface.variables
-- This will change: just node.write and we can store the values in skips which
-- then obeys grouping
- local fixedblankskip = context.fixedblankskip
- local flexibleblankskip = context.flexibleblankskip
- local setblankcategory = context.setblankcategory
- local setblankorder = context.setblankorder
- local setblankpenalty = context.setblankpenalty
- local setblankhandling = context.setblankhandling
- local flushblankhandling = context.flushblankhandling
- local addpredefinedblankskip = context.addpredefinedblankskip
- local addaskedblankskip = context.addaskedblankskip
+ local ctx_fixedblankskip = context.fixedblankskip
+ local ctx_flexibleblankskip = context.flexibleblankskip
+ local ctx_setblankcategory = context.setblankcategory
+ local ctx_setblankorder = context.setblankorder
+ local ctx_setblankpenalty = context.setblankpenalty
+ ----- ctx_setblankhandling = context.setblankhandling
+ local ctx_flushblankhandling = context.flushblankhandling
+ local ctx_addpredefinedblankskip = context.addpredefinedblankskip
+ local ctx_addaskedblankskip = context.addaskedblankskip
local function analyze(str,oldcategory) -- we could use shorter names
for s in gmatch(str,"([^ ,]+)") do
@@ -578,35 +606,35 @@ do -- todo: interface.variables
if mk then
category = analyze(mk,category)
elseif keyword == k_fixed then
- fixedblankskip()
+ ctx_fixedblankskip()
elseif keyword == k_flexible then
- flexibleblankskip()
+ ctx_flexibleblankskip()
elseif keyword == k_category then
local category = tonumber(detail)
if category then
- setblankcategory(category)
+ ctx_setblankcategory(category)
if category ~= oldcategory then
- flushblankhandling()
+ ctx_flushblankhandling()
oldcategory = category
end
end
elseif keyword == k_order and detail then
local order = tonumber(detail)
if order then
- setblankorder(order)
+ ctx_setblankorder(order)
end
elseif keyword == k_penalty and detail then
local penalty = tonumber(detail)
if penalty then
- setblankpenalty(penalty)
+ ctx_setblankpenalty(penalty)
end
else
amount = tonumber(amount) or 1
local sk = skip[keyword]
if sk then
- addpredefinedblankskip(amount,keyword)
+ ctx_addpredefinedblankskip(amount,keyword)
else -- no check
- addaskedblankskip(amount,keyword)
+ ctx_addaskedblankskip(amount,keyword)
end
end
end
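For reference, the keyword dispatch above turns a blank specification into a sequence of context calls. Roughly, and hedged — the amount*keyword and keyword:detail splitting happens in code outside this hunk, and "samepage" reaches the penalty branch through the map set up by \definevspacing later in this patch:

    -- vspacing.analyze("fixed,3*big,samepage") boils down to something like:
    ctx_fixedblankskip()                  -- keyword "fixed"
    ctx_addpredefinedblankskip(3,"big")   -- "big" is a key in the predefined skip table
    ctx_setblankpenalty(10000)            -- "samepage" expands to "penalty:10000"
                                          -- (see \definevspacing[\v!samepage][penalty:10000])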
@@ -614,22 +642,22 @@ do -- todo: interface.variables
return category
end
- local pushlogger = context.pushlogger
- local startblankhandling = context.startblankhandling
- local stopblankhandling = context.stopblankhandling
- local poplogger = context.poplogger
+ local ctx_pushlogger = context.pushlogger
+ local ctx_startblankhandling = context.startblankhandling
+ local ctx_stopblankhandling = context.stopblankhandling
+ local ctx_poplogger = context.poplogger
function vspacing.analyze(str)
if trace_vspacing then
- pushlogger(report_vspacing)
- startblankhandling()
+ ctx_pushlogger(report_vspacing)
+ ctx_startblankhandling()
analyze(str,1)
- stopblankhandling()
- poplogger()
+ ctx_stopblankhandling()
+ ctx_poplogger()
else
- startblankhandling()
+ ctx_startblankhandling()
analyze(str,1)
- stopblankhandling()
+ ctx_stopblankhandling()
end
end
@@ -655,18 +683,18 @@ local trace_list, tracing_info, before, after = { }, false, "", ""
local function nodes_to_string(head)
local current, t = head, { }
while current do
- local id = current.id
+ local id = getid(current)
local ty = nodecodes[id]
if id == penalty_code then
- t[#t+1] = formatters["%s:%s"](ty,current.penalty)
+ t[#t+1] = formatters["%s:%s"](ty,getfield(current,"penalty"))
elseif id == glue_code then -- or id == kern_code then -- to be tested
t[#t+1] = formatters["%s:%p"](ty,current)
elseif id == kern_code then
- t[#t+1] = formatters["%s:%p"](ty,current.kern)
+ t[#t+1] = formatters["%s:%p"](ty,getfield(current,"kern"))
else
t[#t+1] = ty
end
- current = current.next
+ current = getnext(current)
end
return concat(t," + ")
end
@@ -690,7 +718,7 @@ local function trace_info(message, where, what)
end
local function trace_node(what)
- local nt = nodecodes[what.id]
+ local nt = nodecodes[getid(what)]
local tl = trace_list[#trace_list]
if tl and tl[1] == "node" then
trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) }
@@ -700,8 +728,8 @@ local function trace_node(what)
end
local function trace_done(str,data)
- if data.id == penalty_code then
- trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) }
+ if getid(data) == penalty_code then
+ trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,getfield(data,"penalty")) }
else
trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) }
end
@@ -739,31 +767,32 @@ local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
local topskip_code = skipcodes.topskip
local splittopskip_code = skipcodes.splittopskip
-local free_glue_node = free_node
-local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8
-
-- local function free_glue_node(n)
--- -- free_node(n.spec)
--- print("before",n)
--- logs.flush()
-- free_node(n)
--- print("after")
--- logs.flush()
+-- local s = getfield(n,"spec")
+-- if s then
+-- free_node(s)
+-- end
-- end
+local free_glue_node = free_node
+local free_glue_spec = function() end
+----- free_glue_spec = free_node -- can be enabled in 0.73 (so for the moment we leak due to old luatex engine issues)
+
function vspacing.snapbox(n,how)
local sv = snapmethods[how]
if sv then
- local box = texbox[n]
- local list = box.list
+ local box = getbox(n)
+ local list = getlist(box)
if list then
- local s = list[a_snapmethod]
+ local s = getattr(list,a_snapmethod)
if s == 0 then
if trace_vsnapping then
-- report_snapper("box list not snapped, already done")
end
else
- local ht, dp = box.height, box.depth
+ local ht = getfield(box,"height")
+ local dp = getfield(box,"depth")
if false then -- todo: already_done
-- assume that the box is already snapped
if trace_vsnapping then
@@ -772,29 +801,51 @@ function vspacing.snapbox(n,how)
end
else
local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp)
- box.height, box.depth = ch, cd
+ setfield(box,"height",ch)
+ setfield(box,"depth",cd)
if trace_vsnapping then
report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list))
end
- box[a_snapmethod] = 0 --
- list[a_snapmethod] = 0 -- yes or no
+ setattr(box,a_snapmethod,0) --
+ setattr(list,a_snapmethod,0) -- yes or no
end
end
end
end
end
+-- I need to figure out how to deal with the prevdepth that crosses pages. In fact,
+-- prevdepth is often quite interfering (even over a next paragraph) so I need to
+-- figure out a trick. Maybe use something other than a rule. If we visualize we'll
+-- see the baselineskip in action:
+--
+-- \blank[force,5*big] { \baselineskip1cm xxxxxxxxx \par } \page
+-- \blank[force,5*big] { \baselineskip1cm xxxxxxxxx \par } \page
+-- \blank[force,5*big] { \baselineskip5cm xxxxxxxxx \par } \page
+
+-- We can register and copy the rule instead.
+
+local w, h, d = 0, 0, 0
+----- w, h, d = 100*65536, 65536, 65536
+
local function forced_skip(head,current,width,where,trace)
- if where == "after" then
- head, current = insert_node_after(head,current,new_rule(0,0,0))
+ if head == current then
+ if getsubtype(head) == baselineskip_code then
+ width = width - getfield(getfield(head,"spec"),"width")
+ end
+ end
+ if width == 0 then
+ -- do nothing
+ elseif where == "after" then
+ head, current = insert_node_after(head,current,new_rule(w,h,d))
head, current = insert_node_after(head,current,new_kern(width))
- head, current = insert_node_after(head,current,new_rule(0,0,0))
+ head, current = insert_node_after(head,current,new_rule(w,h,d))
else
local c = current
- head, current = insert_node_before(head,current,new_rule(0,0,0))
+ head, current = insert_node_before(head,current,new_rule(w,h,d))
head, current = insert_node_before(head,current,new_kern(width))
- head, current = insert_node_before(head,current,new_rule(0,0,0))
+ head, current = insert_node_before(head,current,new_rule(w,h,d))
current = c
end
if trace then
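forced_skip now compensates for a leading baselineskip and inserts nothing when the remaining width ends up zero. A tiny numeric sketch of that correction (plain numbers instead of scaled points; the helper name is made up):

    -- mirrors the width adjustment at the top of forced_skip
    local function corrected_width(width, head_is_baselineskip, baselineskip_width)
        if head_is_baselineskip then
            width = width - baselineskip_width
        end
        return width -- 0 means: nothing gets inserted
    end

    -- corrected_width(20, true, 12) -> 8   (rule, kern of 8, rule are inserted)
    -- corrected_width(12, true, 12) -> 0   (the forced skip is dropped entirely)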
@@ -805,6 +856,149 @@ end
-- penalty only works well when before skip
+local discard = 0
+local largest = 1
+local force = 2
+local penalty = 3
+local add = 4
+local disable = 5
+local nowhite = 6
+local goback = 7
+local together = 8 -- not used (?)
+local overlay = 9
+
+-- [whatsits][hlist][glue][glue][penalty]
+
+local special_penalty_min = 32250
+local special_penalty_max = 35000
+
+local function specialpenalty(start,penalty)
+ -- nodes.showsimplelist(texlists.page_head,1)
+ local current = find_node_tail(tonut(texlists.page_head)) -- no texlists.page_tail yet
+ while current do
+ local id = getid(current)
+ if id == glue_code then
+ current = getprev(current)
+ elseif id == penalty_code then
+ local p = getfield(current,"penalty")
+ if p == penalty then
+ if trace_vspacing then
+ report_vspacing("overloading penalty %a",p)
+ end
+ return current
+ elseif p >= 10000 then
+ current = getprev(current)
+ else
+ break
+ end
+ else
+ current = getprev(current)
+ end
+ end
+end
+
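The 32250..35000 window above pairs with the \definevspacing[\v!samepage-N] loop further down in this patch, where the base is 32250 and the step is 10. A small sketch of that correspondence, assuming those two counters keep the values set there:

    local special_penalty_min = 32250
    local special_penalty_max = 35000

    -- \c_spac_vspacing_special_base = 32250, \c_spac_vspacing_special_step = 10
    local function samepage_penalty(level)   -- level 1..10, as in the \dorecurse
        return 32250 + 10 * level            -- samepage-1 -> 32260 ... samepage-10 -> 32350
    end

    assert(samepage_penalty(1)  >= special_penalty_min)
    assert(samepage_penalty(10) <= special_penalty_max)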
+local function check_experimental_overlay(head,current) -- todo
+ local p = nil
+ local c = current
+ local n = nil
+
+setfield(head,"prev",nil) -- till we have 0.79 **
+
+ local function overlay(p, n, s, mvl)
+ local c = getprev(n)
+ while c and c ~= p do
+ local p = getprev(c)
+ free_node(c)
+ c = p
+ end
+ setfield(n,"prev",nil)
+ if not mvl then
+ setfield(p,"next",n)
+ end
+ local p_ht = getfield(p,"height")
+ local p_dp = getfield(p,"depth")
+ local n_ht = getfield(n,"height")
+ local delta = n_ht + s + p_dp
+ local k = new_kern(-delta)
+ if trace_vspacing then
+ report_vspacing("overlaying, prev height: %p, prev depth: %p, next height: %p, skips: %p, move up: %p",p_ht,p_dp,n_ht,s,delta)
+ end
+ if n_ht > p_ht then
+ -- we should adapt pagetotal ! (need a hook for that)
+ setfield(p,"height",n_ht)
+ end
+ return k
+ end
+
+ while c do
+ local id = getid(c)
+ if id == glue_code or id == penalty_code or id == kern_code then
+ -- skip (actually, remove)
+ c = getnext(c)
+ elseif id == hlist_code then
+ n = c
+ break
+ else
+ break
+ end
+ end
+ if n then
+ -- we have a next line
+ c = current
+ while c do
+ local id = getid(c)
+ if id == glue_code or id == penalty_code then
+ c = getprev(c)
+ elseif id == hlist_code then
+ p = c
+ break
+ else
+ break
+ end
+ end
+ if not p then
+ if a_snapmethod == a_snapvbox then
+ -- quit, we're not on the mvl
+ else
+ -- messy
+ local c = tonut(texlists.page_head)
+ local s = 0
+ while c do
+ local id = getid(c)
+ if id == glue_code then
+ if p then
+ s = s + getfield(getfield(c,"glue_spec"),"width")
+ end
+ elseif id == kern_code then
+ if p then
+ s = s + getfield(c,"kern")
+ end
+ elseif id == penalty_code then
+ -- skip (actually, remove)
+ elseif id == hlist_code then
+ p = c
+ s = 0
+ else
+ p = nil
+ s = 0
+ end
+ c = getnext(c)
+ end
+ if p and p ~= n then
+ local k = overlay(p,n,s,true)
+ insert_node_before(n,n,k)
+ return k, getnext(n)
+ end
+ end
+ elseif p ~= n then
+ local k = overlay(p,n,0,false )
+ insert_node_after(p,p,k)
+ return head, getnext(n)
+ end
+ end
+ return remove_node(head, current, true)
+end
+
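The inner overlay helper above lifts the next line over the previous one by inserting a negative kern. A worked example of the arithmetic with made-up dimensions (plain numbers rather than scaled points):

    -- previous line: height 10, depth 3; next line: height 9;
    -- 2 units of glue/kern sit between them (the accumulated s)
    local p_ht, p_dp, n_ht, s = 10, 3, 9, 2
    local delta = n_ht + s + p_dp   -- 9 + 2 + 3 = 14
    local kern  = -delta            -- new_kern(-14) pulls the next line up so the two
                                    -- baselines coincide; when n_ht > p_ht the previous
                                    -- height is bumped to n_ht as well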
local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail
if trace then
reset_tracing(head)
@@ -820,14 +1014,25 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if penalty_data then
local p = new_penalty(penalty_data)
if trace then trace_done("flushed due to " .. why,p) end
+if penalty_data >= 10000 then -- or whatever threshold?
+ local prev = getprev(current)
+ if getid(prev) == glue_code then -- maybe go back more, or maybe even push back before any glue
+ -- tricky case: spacing/grid-007.tex: glue penalty glue
+ head = insert_node_before(head,prev,p)
+ else
+ head = insert_node_before(head,current,p)
+ end
+else
head = insert_node_before(head,current,p)
+end
end
if glue_data then
+ local spec = getfield(glue_data,"spec")
if force_glue then
if trace then trace_done("flushed due to " .. why,glue_data) end
- head = forced_skip(head,current,glue_data.spec.width,"before",trace)
+ head = forced_skip(head,current,getfield(spec,"width"),"before",trace)
free_glue_node(glue_data)
- elseif glue_data.spec.writable then
+ elseif getfield(spec,"writable") then
if trace then trace_done("flushed due to " .. why,glue_data) end
head = insert_node_before(head,current,glue_data)
else
@@ -841,17 +1046,18 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
if trace_vsnapping then
report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p",
- texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth,
- texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth)
+ texgetdimen("globalbodyfontstrutheight"), texgetdimen("globalbodyfontstrutdepth"),
+ texgetdimen("bodyfontstrutheight"), texgetdimen("bodyfontstrutdepth")
+ )
end
if trace then trace_info("start analyzing",where,what) end
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
-- needs checking, why so many calls
if snap then
- local list = current.list
- local s = current[a_snapmethod]
+ local list = getlist(current)
+ local s = getattr(current,a_snapmethod)
if not s then
-- if trace_vsnapping then
-- report_snapper("mvl list not snapped")
@@ -865,7 +1071,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if sv then
-- check if already snapped
if list and already_done(id,list,a_snapmethod) then
- local ht, dp = current.height, current.depth
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
-- assume that the box is already snapped
if trace_vsnapping then
report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list))
@@ -880,32 +1087,39 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif trace_vsnapping then
report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
end
- current[a_snapmethod] = 0
+ setattr(current,a_snapmethod,0)
end
else
--
end
-- tex.prevdepth = 0
flush("list")
- current = current.next
+ current = getnext(current)
elseif id == penalty_code then
- -- natural_penalty = current.penalty
+ -- natural_penalty = getfield(current,"penalty")
-- if trace then trace_done("removed penalty",current) end
-- head, current = remove_node(head, current, true)
- current = current.next
+ current = getnext(current)
elseif id == kern_code then
- if snap and trace_vsnapping and current.kern ~= 0 then
- report_snapper("kern of %p kept",current.kern)
+ if snap and trace_vsnapping and getfield(current,"kern") ~= 0 then
+ report_snapper("kern of %p kept",getfield(current,"kern"))
end
flush("kern")
- current = current.next
+ current = getnext(current)
elseif id == glue_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == userskip_code then
- local sc = current[a_skipcategory] -- has no default, no unset (yet)
- local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet)
- local sp = current[a_skippenalty] -- has no default, no unset (yet)
+ local sc = getattr(current,a_skipcategory) -- has no default, no unset (yet)
+ local so = getattr(current,a_skiporder) or 1 -- has 1 default, no unset (yet)
+ local sp = getattr(current,a_skippenalty) -- has no default, no unset (yet)
if sp and sc == penalty then
+ if where == "page" and sp >= special_penalty_min and sp <= special_penalty_max then
+ local previousspecial = specialpenalty(current,sp)
+ if previousspecial then
+ setfield(previousspecial,"penalty",0)
+ sp = 0
+ end
+ end
if not penalty_data then
penalty_data = sp
elseif penalty_order < so then
@@ -920,36 +1134,38 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_done("flush",glue_data) end
head = insert_node_before(head,current,glue_data)
if trace then trace_natural("natural",current) end
- current = current.next
+ current = getnext(current)
else
-- not look back across head
- local previous = current.prev
- if previous and previous.id == glue_code and previous.subtype == userskip_code then
- local ps = previous.spec
- if ps.writable then
- local cs = current.spec
- if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then
- local pw, pp, pm = ps.width, ps.stretch, ps.shrink
- local cw, cp, cm = cs.width, cs.stretch, cs.shrink
+-- todo: prev can be whatsit (latelua)
+ local previous = getprev(current)
+ if previous and getid(previous) == glue_code and getsubtype(previous) == userskip_code then
+ local ps = getfield(previous,"spec")
+ if getfield(ps,"writable") then
+ local cs = getfield(current,"spec")
+ if getfield(cs,"writable") and getfield(ps,"stretch_order") == 0 and getfield(ps,"shrink_order") == 0 and getfield(cs,"stretch_order") == 0 and getfield(cs,"shrink_order") == 0 then
+ local pw, pp, pm = getfield(ps,"width"), getfield(ps,"stretch"), getfield(ps,"shrink")
+ local cw, cp, cm = getfield(cs,"width"), getfield(cs,"stretch"), getfield(cs,"shrink")
-- ps = writable_spec(previous) -- no writable needed here
-- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm
- previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear
+ free_glue_spec(ps)
+ setfield(previous,"spec",new_gluespec(pw + cw, pp + cp, pm + cm)) -- else topskip can disappear
if trace then trace_natural("removed",current) end
head, current = remove_node(head, current, true)
- -- current = previous
+ -- current = previous
if trace then trace_natural("collapsed",previous) end
- -- current = current.next
+ -- current = getnext(current)
else
if trace then trace_natural("filler",current) end
- current = current.next
+ current = getnext(current)
end
else
if trace then trace_natural("natural (no prev spec)",current) end
- current = current.next
+ current = getnext(current)
end
else
if trace then trace_natural("natural (no prev)",current) end
- current = current.next
+ current = getnext(current)
end
end
glue_order, glue_data = 0, nil
@@ -967,6 +1183,10 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif sc == discard then
if trace then trace_skip("discard",sc,so,sp,current) end
head, current = remove_node(head, current, true)
+ elseif sc == overlay then
+                    -- todo: overlay the following line over the previous one
+ if trace then trace_skip("overlay",sc,so,sp,current) end
+ head, current = check_experimental_overlay(head,current,a_snapmethod)
elseif ignore_following then
if trace then trace_skip("disabled",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -982,12 +1202,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif glue_order == so then
-- is now exclusive, maybe support goback as combi, else why a set
if sc == largest then
- local cs, gs = current.spec, glue_data.spec
- local cw, gw = cs.width, gs.width
+ local cs, gs = getfield(current,"spec"), getfield(glue_data,"spec")
+ local cw, gw = getfield(cs,"width"), getfield(gs,"width")
if cw > gw then
if trace then trace_skip("largest",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
+ head, current, glue_data = remove_node(head,current)
else
if trace then trace_skip("remove smallest",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -995,7 +1215,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif sc == goback then
if trace then trace_skip("goback",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
+ head, current, glue_data = remove_node(head,current)
elseif sc == force then
-- last one counts, some day we can provide an accumulator and largest etc
-- but not now
@@ -1009,11 +1229,11 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
head, current = remove_node(head, current, true)
elseif sc == add then
if trace then trace_skip("add",sc,so,sp,current) end
- -- local old, new = glue_data.spec, current.spec
- local old, new = writable_spec(glue_data), current.spec
- old.width = old.width + new.width
- old.stretch = old.stretch + new.stretch
- old.shrink = old.shrink + new.shrink
+ -- local old, new = glue_data.spec, getfield(current,"spec")
+ local old, new = writable_spec(glue_data), getfield(current,"spec")
+ setfield(old,"width",getfield(old,"width") + getfield(new,"width"))
+ setfield(old,"stretch",getfield(old,"stretch") + getfield(new,"stretch"))
+ setfield(old,"shrink",getfield(old,"shrink") + getfield(new,"shrink"))
                            -- todo: order
head, current = remove_node(head, current, true)
else
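Within the same order the category branches above combine successive user skips: largest keeps the wider glue, add accumulates width, stretch and shrink into the pending glue_data. A small numeric illustration of the add case — plain Lua tables stand in for the glue specs here:

    local old = { width = 6, stretch = 2, shrink = 1 }   -- pending glue_data spec
    local new = { width = 4, stretch = 1, shrink = 1 }   -- incoming user skip spec
    old.width   = old.width   + new.width                -- 10
    old.stretch = old.stretch + new.stretch              -- 3
    old.shrink  = old.shrink  + new.shrink               -- 2
    -- the incoming node is then removed; only the accumulated glue_data survives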
@@ -1029,12 +1249,13 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
elseif subtype == lineskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
- if current.spec.writable then
+ setattr(current,a_snapmethod,0)
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") then
local spec = writable_spec(current)
- spec.width = 0
+ setfield(spec,"width",0)
if trace_vsnapping then
report_snapper("lineskip set to zero")
end
@@ -1047,15 +1268,16 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("lineskip",sc,so,sp,current) end
flush("lineskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == baselineskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
- if current.spec.writable then
+ setattr(current,a_snapmethod,0)
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") then
local spec = writable_spec(current)
- spec.width = 0
+ setfield(spec,"width",0)
if trace_vsnapping then
report_snapper("baselineskip set to zero")
end
@@ -1068,16 +1290,17 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("baselineskip",sc,so,sp,current) end
flush("baselineskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == parskip_code then
-- parskip always comes later
if ignore_whitespace then
if trace then trace_natural("ignored parskip",current) end
head, current = remove_node(head, current, true)
elseif glue_data then
- local ps, gs = current.spec, glue_data.spec
- if ps.writable and gs.writable and ps.width > gs.width then
- glue_data.spec = copy_node(ps)
+ local ps = getfield(current,"spec")
+ local gs = getfield(glue_data,"spec")
+ if getfield(ps,"writable") and getfield(gs,"writable") and getfield(ps,"width") > getfield(gs,"width") then
+ setfield(glue_data,"spec",copy_node(ps))
if trace then trace_natural("taking parskip",current) end
else
if trace then trace_natural("removed parskip",current) end
@@ -1089,9 +1312,9 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
elseif subtype == topskip_code or subtype == splittopskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
+ setattr(current,a_snapmethod,0)
local sv = snapmethods[s]
local w, cw = snap_topskip(current,sv)
if trace_vsnapping then
@@ -1105,43 +1328,46 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("topskip",sc,so,sp,current) end
flush("topskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == abovedisplayskip_code then
--
if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end
flush("above display skip (normal)")
- current = current.next
+ current = getnext(current)
--
elseif subtype == belowdisplayskip_code then
--
if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end
flush("below display skip (normal)")
- current = current.next
- --
+ current = getnext(current)
+ --
elseif subtype == abovedisplayshortskip_code then
--
if trace then trace_skip("above display skip (short)",sc,so,sp,current) end
flush("above display skip (short)")
- current = current.next
+ current = getnext(current)
--
elseif subtype == belowdisplayshortskip_code then
--
if trace then trace_skip("below display skip (short)",sc,so,sp,current) end
flush("below display skip (short)")
- current = current.next
+ current = getnext(current)
--
else -- other glue
- if snap and trace_vsnapping and current.spec.writable and current.spec.width ~= 0 then
- report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype])
- --~ current.spec.width = 0
+ if snap and trace_vsnapping then
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") and getfield(spec,"width") ~= 0 then
+ report_snapper("glue %p of type %a kept",getfield(spec,"width"),skipcodes[subtype])
+ -- setfield(spec,"width",0)
+ end
end
- if trace then trace_skip(formatted["glue of type %a"](subtype),sc,so,sp,current) end
+ if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
flush("some glue")
- current = current.next
+ current = getnext(current)
end
else
- flush("something else")
- current = current.next
+ flush(formatters["node with id %a"](id))
+ current = getnext(current)
end
end
if trace then trace_info("stop analyzing",where,what) end
@@ -1162,11 +1388,13 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if not tail then tail = find_node_tail(head) end
if trace then trace_done("result",glue_data) end
if force_glue then
- head, tail = forced_skip(head,tail,glue_data.spec.width,"after",trace)
+ local spec = getfield(glue_data,"spec")
+ head, tail = forced_skip(head,tail,getfield(spec,"width"),"after",trace)
free_glue_node(glue_data)
else
head, tail = insert_node_after(head,tail,glue_data)
end
+texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
end
if trace then
if glue_data or penalty_data then
@@ -1174,7 +1402,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
show_tracing(head)
if oldhead ~= head then
- trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id])
+ trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
end
end
return head, true
@@ -1196,18 +1424,23 @@ local function report(message,lst)
report_vspacing(message,count_nodes(lst,true),nodeidstostring(lst))
end
+-- ugly code: we get partial lists (check if this stack is still okay) ... and we run
+-- into temp nodes (sigh)
+
function vspacing.pagehandler(newhead,where)
-- local newhead = texlists.contrib_head
if newhead then
+ newhead = tonut(newhead)
local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
local flush = false
stackhack = true -- todo: only when grid snapping once enabled
+ -- todo: fast check if head = tail
for n in traverse_nodes(newhead) do -- we could just look for glue nodes
- local id = n.id
+ local id = getid(n)
if id ~= glue_code then
flush = true
- elseif n.subtype == userskip_code then
- if n[a_skipcategory] then
+ elseif getsubtype(n) == userskip_code then
+ if getattr(n,a_skipcategory) then
stackhack = true
else
flush = true
@@ -1219,35 +1452,36 @@ function vspacing.pagehandler(newhead,where)
if flush then
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (final): %s",newhead) end
- stacktail.next = newhead
- newhead.prev = stacktail
+ setfield(stacktail,"next",newhead)
+ setfield(newhead,"prev",stacktail)
newhead = stackhead
stackhead, stacktail = nil, nil
end
if stackhack then
stackhack = false
if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end
---~ texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
- newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
else
if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end
---~ texlists.contrib_head = newhead
+ -- texlists.contrib_head = newhead
end
+ return tonode(newhead)
else
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (intermediate): %s",newhead) end
- stacktail.next = newhead
- newhead.prev = stacktail
+ setfield(stacktail,"next",newhead)
+ setfield(newhead,"prev",stacktail)
else
if trace_collect_vspacing then report("storing %s nodes in stack (initial): %s",newhead) end
stackhead = newhead
end
stacktail = newtail
-- texlists.contrib_head = nil
- newhead = nil
+ -- newhead = nil
end
end
- return newhead
+ return nil
end
local ignore = table.tohash {
@@ -1257,98 +1491,38 @@ local ignore = table.tohash {
}
function vspacing.vboxhandler(head,where)
- if head and not ignore[where] and head.next then
- -- starttiming(vspacing)
- head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
- -- stoptiming(vspacing)
+ if head and not ignore[where] then
+ local h = tonut(head)
+ if getnext(h) then
+ h = collapser(h,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
+ return tonode(h)
+ end
end
return head
end
function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod
- local list = texbox[n].list
- if list then
- -- starttiming(vspacing)
- texbox[n].list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod))
- -- stoptiming(vspacing)
- end
-end
-
--- We will split this module so a few locals are repeated. Also this will be
--- rewritten.
-
-nodes.builders = nodes.builder or { }
-local builders = nodes.builders
-
-local actions = nodes.tasks.actions("vboxbuilders")
-
-function builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction)
- local done = false
- if head then
- starttiming(builders)
- if trace_vpacking then
- local before = nodes.count(head)
- head, done = actions(head,groupcode,size,packtype,maxdepth,direction)
- local after = nodes.count(head)
- if done then
- nodes.processors.tracer("vpack","changed",head,groupcode,before,after,true)
- else
- nodes.processors.tracer("vpack","unchanged",head,groupcode,before,after,true)
- end
- else
- head, done = actions(head,groupcode)
+ local box = getbox(n)
+ if box then
+ local list = getlist(box)
+ if list then
+ list = collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)
+ setfield(box,"list",vpack_node(list))
end
- stoptiming(builders)
end
- return head, done
end
--- This one is special in the sense that it has no head and we operate on the mlv. Also,
--- we need to do the vspacing last as it removes items from the mvl.
+-- This one is needed to prevent bleeding of prevdepth to the next page
+-- which doesn't work well with forced skips.
-local actions = nodes.tasks.actions("mvlbuilders")
+local outer = texnest[0]
-local function report(groupcode,head)
- report_page_builder("trigger: %s",groupcode)
- report_page_builder(" vsize : %p",tex.vsize)
- report_page_builder(" pagegoal : %p",tex.pagegoal)
- report_page_builder(" pagetotal: %p",tex.pagetotal)
- report_page_builder(" list : %s",head and nodeidstostring(head) or "")
-end
-
-function builders.buildpage_filter(groupcode)
- local head, done = texlists.contrib_head, false
- -- if head and head.next and head.next.id == hlist_code and head.next.width == 1 then
- -- report_page_builder("trigger otr calculations")
- -- free_node_list(head)
- -- head = nil
- -- end
- if head then
- starttiming(builders)
- if trace_page_builder then
- report(groupcode,head)
- end
- head, done = actions(head,groupcode)
- stoptiming(builders)
- -- -- doesn't work here (not passed on?)
- -- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom
- texlists.contrib_head = head
- return done and head or true
- else
- if trace_page_builder then
- report(groupcode)
- end
- return nil, false
+function vspacing.resetprevdepth()
+ if texlists.hold_head then
+ outer.prevdepth = 0
end
end
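resetprevdepth is triggered from the TeX side further down in this patch (\ctxcommand{resetprevdepth()} appended to \everyafteroutput) and only acts when the output routine held material over. A minimal illustrative sketch of what it touches, assuming LuaTeX's tex.nest array where index 0 is the outermost vertical list:

    -- illustrative only; the real function uses the cached locals above
    local function resetprevdepth()
        if tex.lists.hold_head then     -- something was carried over by the output routine
            tex.nest[0].prevdepth = 0   -- keep prevdepth from bleeding onto the next page
        end
    end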
-callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc")
-callbacks.register('buildpage_filter', builders.buildpage_filter, "vertical spacing etc (mvl)")
-
-statistics.register("v-node processing time", function()
- return statistics.elapsedseconds(builders)
-end)
-
-- interface
commands.vspacing = vspacing.analyze
@@ -1356,3 +1530,5 @@ commands.vspacingsetamount = vspacing.setskip
commands.vspacingdefine = vspacing.setmap
commands.vspacingcollapse = vspacing.collapsevbox
commands.vspacingsnap = vspacing.snapbox
+commands.resetprevdepth = vspacing.resetprevdepth
+commands.definesnapmethod = vspacing.definesnapmethod
diff --git a/Master/texmf-dist/tex/context/base/spac-ver.mkiv b/Master/texmf-dist/tex/context/base/spac-ver.mkiv
index ee78d89640a..0c84958beb0 100644
--- a/Master/texmf-dist/tex/context/base/spac-ver.mkiv
+++ b/Master/texmf-dist/tex/context/base/spac-ver.mkiv
@@ -152,10 +152,14 @@
\unexpanded\def\setupinterlinespace
{\dodoubleempty\spac_linespacing_setup}
+\ifdefined\setupinterlinespace_double \else
+ \let\setupinterlinespace_double\setup_interlinespace % for a while
+\fi
+
\def\spac_linespacing_setup[#1][#2]%
{\settrue\interlinespaceisset % reset has to be done when needed
\ifsecondargument
- \setup_interlinespace[#1][#2]%
+ \setupinterlinespace_double[#1][#2]%
\else\iffirstargument
\ifcsname\namedinterlinespacehash{#1}\s!parent\endcsname
\edef\currentinterlinespace{#1}%
@@ -330,7 +334,7 @@
\let\v_spac_whitespace_current\v!none
\unexpanded\def\setupwhitespace
- {\doifnextoptionalelse\spac_whitespace_setup_yes\spac_whitespace_setup_nop}
+ {\doifnextoptionalcselse\spac_whitespace_setup_yes\spac_whitespace_setup_nop}
\def\spac_whitespace_setup_nop
{\ifx\v_spac_whitespace_current\v!none\else
@@ -542,6 +546,8 @@
\ignorespaces
\let\spac_lines_stop_correction\spac_lines_stop_correction_yes}
+% still not ok ... will move to the lua end ... needs a final solution
+
\unexpanded\def\spac_lines_stop_correction_yes
{\removeunwantedspaces
\egroup
@@ -549,6 +555,11 @@
\blank[\v!white]%
\snaptogrid\hbox{\box\scratchbox}%
\else
+\blank[\v!nowhite]%
+\ifdim\parskip>\zeropoint
+ % too fuzzy otherwise
+\else
+ % doesn't like whitespace
\ifdim\d_spac_prevdepth<\maxdimen
\unless\ifdim\d_spac_prevdepth<\zeropoint
\ifdim\d_spac_prevdepth<\strutdp \relax
@@ -562,6 +573,7 @@
\fi
\fi
\fi
+\fi
\ifdim\pagegoal<\maxdimen
\blank[\v!white,\the\d_spac_lines_correction_before]% \blank[\v!white]\dotopbaselinecorrection
\fi
@@ -715,6 +727,10 @@
\unexpanded\def\setmaxdepth
{\maxdepth\systemmaxdepthfactor\globalbodyfontsize}
+\let\normalbaselineskip \relax \newskip \normalbaselineskip % these got lost in the transition to mkiv due
+\let\normallineskip \relax \newskip \normallineskip % to auto-\normal* definitions and registers
+\let\normallineskiplimit\relax \newdimen\normallineskiplimit % being protected
+
\unexpanded\def\normalbaselines
{\baselineskip \normalbaselineskip
\lineskip \normallineskip
@@ -913,7 +929,7 @@
\s!depth \strutdp}}
\def\spac_struts_set_vide
- {\setbox\strutbox\hbox
+ {\setbox\strutbox\hbox % at some time this extra wrapping was needed
{\spac_struts_vide_hbox to \zeropoint
{% \hss % new, will be option
\vrule
@@ -939,8 +955,8 @@
\unexpanded\def\strut % still callbacks for \hbox{\strut}
{\relax
- \dontleavehmode
- \copy\strutbox}
+ \dontleavehmode
+ \copy\strutbox}
\let\normalstrut\strut
@@ -984,7 +1000,7 @@
\setcharstrut\m_strut
\fi}
-\unexpanded\def\showstruts
+\unexpanded\def\showstruts % adapts .. is wrong
{\setteststrut
\settestcrlf}
@@ -1150,6 +1166,10 @@
\let\normaloffinterlineskip\offinterlineskip % knuth's original
+\appendtoks
+ \ifvmode\ctxcommand{resetprevdepth()}\fi % a nasty hack (tested for a while now)
+\to \everyafteroutput
+
%D My own one:
\unexpanded\def\spac_helpers_push_interlineskip_yes
@@ -1321,7 +1341,7 @@
\unexpanded\def\installsnapvalues#1#2% todo: a proper define
{\edef\currentsnapper{#1:#2}%
\ifcsname\??gridsnapperattributes\currentsnapper\endcsname \else
- \setevalue{\??gridsnapperattributes\currentsnapper}{\ctxlua{builders.vspacing.definesnapmethod("#1","#2")}}%
+ \setevalue{\??gridsnapperattributes\currentsnapper}{\ctxcommand{definesnapmethod("#1","#2")}}%
\fi
\setevalue{\??gridsnappers#1}{\attribute\snapmethodattribute\csname\??gridsnapperattributes\currentsnapper\endcsname\space}}
@@ -1747,7 +1767,7 @@
% The main spacer:
\unexpanded\def\vspacing
- {\doifnextoptionalelse\spac_vspacing_yes\spac_vspacing_nop}
+ {\doifnextoptionalcselse\spac_vspacing_yes\spac_vspacing_nop}
\def\spac_vspacing_yes
{\ifinpagebody % somewhat weird
@@ -1803,7 +1823,7 @@
% these depend on bigskipamount cum suis so we'd better sync them
\unexpanded\def\setupvspacing
- {\doifnextoptionalelse\setupvspacing_yes\setupvspacing_nop}
+ {\doifnextoptionalcselse\setupvspacing_yes\setupvspacing_nop}
\let\currentvspacing\s!default % hm, default, standard ...
@@ -1856,6 +1876,14 @@
\fi\fi
\relax}
+% \strut \hfill first line \blank[overlay] second line \hfill \strut
+%
+% \ruledvbox {
+% \strut \hfill line 1 \blank[overlay]
+% line 2 \hfill \strut \blank[overlay]
+% \strut \hfill line 3 \hfill \strut
+% }
+
\definevspacing[\v!preference][penalty:-500] % goodbreak
\definevspacing[\v!samepage] [penalty:10000] % nobreak
\definevspacing[\v!max] [category:1]
@@ -1863,14 +1891,22 @@
\definevspacing[\v!disable] [category:5]
\definevspacing[\v!nowhite] [category:6]
\definevspacing[\v!back] [category:7]
+% together [category:8]
+\definevspacing[\v!overlay] [category:9]
\definevspacing[\v!always] [category:0]
\definevspacing[\v!weak] [order:0]
\definevspacing[\v!strong] [order:100]
\definevspacing[\s!default] [\v!white] % was big for a while
-\dorecurse{10} % todo: other values < 4000
- {\normalexpanded{\definevspacing[\v!samepage-\recurselevel][penalty:\the\numexpr4000+250*\recurselevel\relax]}}
+% \dorecurse{10} % todo: other values < 4000
+% {\normalexpanded{\definevspacing[\v!samepage-\recurselevel][penalty:\the\numexpr4000+250*\recurselevel\relax]}}
+
+\newcount\c_spac_vspacing_special_base \c_spac_vspacing_special_base = 32250 % 4000
+\newcount\c_spac_vspacing_special_step \c_spac_vspacing_special_step = 10 % 250
+
+\dorecurse{10}
+ {\normalexpanded{\definevspacing[\v!samepage-\recurselevel][penalty:\the\numexpr\c_spac_vspacing_special_base+\c_spac_vspacing_special_step*\recurselevel\relax]}}
\definevspacing[\v!default] [\v!big] % todo: needs to adapt to \setupblank
\definevspacing[\v!before] [\v!default] % but we need to avoid circular references
@@ -2073,4 +2109,42 @@
%
% \def\shapefill{\vskip\zeropoint\s!plus\lineheight\s!minus\lineheight\relax}
+%D Nasty:
+
+% \writestatus{1}{\the\prevdepth} \blank[force,5*big] { \writestatus{1}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{2}{\the\prevdepth} \blank[force,5*big] { \writestatus{2}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{3}{\the\prevdepth} \blank[force,5*big] { \writestatus{3}{\the\prevdepth} \baselineskip5cm xxxxxxxxx \par } \page
+% \writestatus{4}{\the\prevdepth} \input tufte \page
+% \writestatus{5}{\the\prevdepth} \input tufte \page
+% \writestatus{6}{\the\prevdepth} \blank[force,5*big] { \writestatus{6}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+
+% \writestatus{1}{\the\prevdepth} \null\vskip4cm { \writestatus{1}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{2}{\the\prevdepth} \null\vskip4cm { \writestatus{2}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{3}{\the\prevdepth} \null\vskip4cm { \writestatus{3}{\the\prevdepth} \baselineskip5cm xxxxxxxxx \par } \page
+% \writestatus{4}{\the\prevdepth} \input tufte \page
+% \writestatus{5}{\the\prevdepth} \input tufte \page
+% \writestatus{6}{\the\prevdepth} \null\vskip4cm { \writestatus{6}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+
+\appendtoks
+ \ifvmode\prevdepth\zeropoint\fi % consistent, else first page -1000pt .. needed for fixed,3*big first/successive pages consistency
+\to \everystarttext
+
+\prevdepth\zeropoint
+
+% not ok, so we need to figure out another way to fix this messy prevdepth-across-page issue
+% as encountered in forced blank skips (see lua code)
+%
+% \appendtoks
+% \ifvmode\ctxcommand{resetprevdepth()}\fi
+% \to \everyafteroutput
+%
+% this should only happen when there is nothing left over (how to determine that) .. testcase:
+%
+% \dorecurse{41}{line\par}
+% \starttyping
+% line 1
+% line 2
+% line 3
+% \stoptyping
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/status-files.pdf b/Master/texmf-dist/tex/context/base/status-files.pdf
index 097fe5a2dca..f7a228bfc0f 100644
Binary files a/Master/texmf-dist/tex/context/base/status-files.pdf and b/Master/texmf-dist/tex/context/base/status-files.pdf differ
diff --git a/Master/texmf-dist/tex/context/base/status-lua.pdf b/Master/texmf-dist/tex/context/base/status-lua.pdf
index f727ca843ec..547c0e78579 100644
Binary files a/Master/texmf-dist/tex/context/base/status-lua.pdf and b/Master/texmf-dist/tex/context/base/status-lua.pdf differ
diff --git a/Master/texmf-dist/tex/context/base/status-mkiv.lua b/Master/texmf-dist/tex/context/base/status-mkiv.lua
index 443eee60ec4..07e912a88ba 100644
--- a/Master/texmf-dist/tex/context/base/status-mkiv.lua
+++ b/Master/texmf-dist/tex/context/base/status-mkiv.lua
@@ -1259,6 +1259,12 @@ return {
loading = "always",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "typo-tal",
+ loading = "always",
+ status = "okay",
+ },
{
category = "mkiv",
comment = "somewhat weird",
@@ -1490,12 +1496,6 @@ return {
loading = "always",
status = "okay",
},
- {
- category = "mkiv",
- filename = "spac-cha",
- loading = "always",
- status = "okay",
- },
{
category = "mkiv",
comment = "work in progress",
@@ -1586,8 +1586,13 @@ return {
},
{
category = "mkiv",
- comment = "might get extended",
- filename = "typo-par",
+ filename = "typo-drp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "typo-fln",
loading = "always",
status = "okay",
},
@@ -1679,6 +1684,12 @@ return {
loading = "experimental",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "meta-fnt",
+ loading = "always",
+ status = "okay",
+ },
{
category = "mkiv",
filename = "meta-tex",
@@ -1817,6 +1828,12 @@ return {
loading = "always",
status = "okay",
},
+ {
+ category = "mkvi",
+ filename = "math-acc",
+ loading = "always",
+ status = "okay",
+ },
{
category = "mkiv",
comment = "at least for the moment",
@@ -1849,6 +1866,12 @@ return {
loading = "always",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "math-rad",
+ loading = "always",
+ status = "okay",
+ },
{
category = "mkiv",
comment = "code might move to here",
@@ -1991,13 +2014,13 @@ return {
{
category = "mkiv",
filename = "bibl-bib",
- loading = "always",
+ loading = "on demand",
status = "pending",
},
{
category = "mkiv",
filename = "bibl-tra",
- loading = "always",
+ loading = "on demand",
status = "pending",
},
{
@@ -2511,6 +2534,60 @@ return {
loading = "on demand",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "publ-ini",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-old",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-tra",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-usr",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-xml",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-apa",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-cite",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-definitions",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-commands",
+ loading = "always",
+ status = "pending",
+ },
},
lua = {
{
@@ -2583,12 +2660,12 @@ return {
{
category = "lua",
filename = "bibl-bib",
- status = "todo",
+ loading = "on demand",
},
{
category = "lua",
filename = "bibl-tra",
- status = "todo",
+ loading = "on demand",
},
{
category = "lua",
@@ -3128,6 +3205,12 @@ return {
loading = "font-lib",
status = "okay",
},
+ {
+ category = "lua",
+ filename = "font-inj",
+ loading = "font-lib",
+ status = "okay",
+ },
{
category = "lua",
filename = "font-ldr",
@@ -3714,7 +3797,12 @@ return {
{
category = "lua",
filename = "m-database",
- status = "todo",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "m-nodechart",
+ status = "okay",
},
{
category = "lua",
@@ -3836,6 +3924,12 @@ return {
loading = "meta-lua",
status = "okay",
},
+ {
+ category = "lua",
+ filename = "meta-fnt",
+ loading = "meta-fnt",
+ status = "okay",
+ },
{
category = "lua",
comment = "could be done nicer nowadays but who needs it",
@@ -3999,6 +4093,11 @@ return {
filename = "node-pag",
status = "todo",
},
+ {
+ category = "lua",
+ filename = "node-ppt",
+ status = "todo",
+ },
{
category = "lua",
filename = "node-pro",
@@ -4742,13 +4841,40 @@ return {
{
category = "lua",
filename = "typo-dir",
+ loading = "typo-dir",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "work in progress",
+ filename = "typo-dha",
+ loading = "typo-dir",
status = "todo",
},
+ {
+ category = "lua",
+ filename = "typo-dua",
+ loading = "typo-dir",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ comment = "work in progress",
+ filename = "typo-dub",
+ loading = "typo-dir",
+ status = "okay",
+ },
{
category = "lua",
filename = "typo-ini",
status = "todo",
},
+ {
+ category = "mkiv",
+ filename = "typo-tal",
+ loading = "typo-tal",
+ status = "okay",
+ },
{
category = "lua",
filename = "typo-itc",
@@ -4771,8 +4897,13 @@ return {
},
{
category = "lua",
- filename = "typo-par",
- status = "todo",
+ filename = "typo-drp",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "typo-fln",
+ status = "okay",
},
{
category = "lua",
@@ -4931,6 +5062,42 @@ return {
filename = "x-mathml",
status = "todo",
},
+ {
+ category = "lua",
+ filename = "publ-ini",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-aut",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-dat",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-oth",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-tra",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-usr",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
},
metafun = {
{
@@ -5090,7 +5257,12 @@ return {
{
category = "mkiv",
filename = "m-database",
- status = "todo",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
+ filename = "m-nodechart",
+ status = "okay",
},
{
category = "tex",
diff --git a/Master/texmf-dist/tex/context/base/strc-bkm.lua b/Master/texmf-dist/tex/context/base/strc-bkm.lua
index d9c268ce4d9..c38ab3c2e8c 100644
--- a/Master/texmf-dist/tex/context/base/strc-bkm.lua
+++ b/Master/texmf-dist/tex/context/base/strc-bkm.lua
@@ -85,7 +85,7 @@ end
local function stripped(str) -- kind of generic
str = gsub(str,"\\([A-Z]+)","%1") -- \LOGO
str = gsub(str,"\\ "," ") -- \
- str = gsub(str,"\\([A-Za-z]+) *{(.-)}","%1") -- \bla{...}
+ str = gsub(str,"\\([A-Za-z]+) *{(.-)}","%2") -- \bla{...}
str = gsub(str," +"," ") -- spaces
return str
end
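
The one-character change above swaps which capture survives the substitution; a small illustration (hypothetical input, not part of the patch):

    -- hedged illustration of the "%1" versus "%2" capture
    local s = [[\emph{important} text]]
    print((s:gsub("\\([A-Za-z]+) *{(.-)}","%1")))   -- "emph text"      : the old code kept the command name
    print((s:gsub("\\([A-Za-z]+) *{(.-)}","%2")))   -- "important text" : the fix keeps the argument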
@@ -101,11 +101,65 @@ function bookmarks.setup(spec)
end
end
+-- function bookmarks.place()
+-- if next(names) then
+-- local list = lists.filtercollected(names,"all",nil,lists.collected,forced)
+-- if #list > 0 then
+-- local levels, noflevels, lastlevel = { }, 0, 1
+-- for i=1,#list do
+-- local li = list[i]
+-- local metadata = li.metadata
+-- local name = metadata.name
+-- if not metadata.nolist or forced[name] then -- and levelmap[name] then
+-- local titledata = li.titledata
+-- if titledata then
+-- local structural = levelmap[name]
+-- lastlevel = structural or lastlevel
+-- local title = titledata.bookmark
+-- if not title or title == "" then
+-- -- We could typeset the title and then convert it.
+-- if not structural then
+-- -- placeholder, todo: bookmarklabel
+-- title = name .. ": " .. (titledata.title or "?")
+-- else
+-- title = titledata.title or "?"
+-- end
+-- end
+-- if numbered[name] then
+-- local sectiondata = sections.collected[li.references.section]
+-- local numberdata = li.numberdata
+-- if sectiondata and numberdata and not numberdata.hidenumber then
+-- -- we could typeset the number and convert it
+-- title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. title
+-- end
+-- end
+-- noflevels = noflevels + 1
+-- levels[noflevels] = {
+-- lastlevel,
+-- stripped(title), -- can be replaced by converter
+-- li.references, -- has internal and realpage
+-- allopen or opened[name]
+-- }
+-- end
+-- end
+-- end
+-- bookmarks.finalize(levels)
+-- end
+-- function bookmarks.place() end -- prevent second run
+-- end
+-- end
+
function bookmarks.place()
if next(names) then
- local list = lists.filtercollected(names,"all",nil,lists.collected,forced)
- if #list > 0 then
- local levels, noflevels, lastlevel = { }, 0, 1
+ local levels = { }
+ local noflevels = 0
+ local lastlevel = 1
+ local nofblocks = #lists.sectionblocks -- always >= 1
+ local showblocktitle = toboolean(numberspec.showblocktitle,true)
+ for i=1,nofblocks do
+ local block = lists.sectionblocks[i]
+ local blockdone = nofblocks == 1
+ local list = lists.filtercollected(names,block..":all",nil,lists.collected,forced)
for i=1,#list do
local li = list[i]
local metadata = li.metadata
@@ -113,8 +167,26 @@ function bookmarks.place()
if not metadata.nolist or forced[name] then -- and levelmap[name] then
local titledata = li.titledata
if titledata then
+ if not blockdone then
+ if showblocktitle then
+ -- add block entry
+ local blockdata = sections.sectionblockdata[block]
+ noflevels = noflevels + 1
+ levels[noflevels] = {
+ 1, -- toplevel
+ stripped(blockdata.bookmark ~= "" and blockdata.bookmark or block),
+ li.references,
+ allopen or opened[name] -- same as first entry
+ }
+ end
+ blockdone = true
+ end
local structural = levelmap[name]
lastlevel = structural or lastlevel
+ if nofblocks > 1 then
+ -- we have a block so increase the level
+ lastlevel = lastlevel + 1
+ end
local title = titledata.bookmark
if not title or title == "" then
-- We could typeset the title and then convert it.
@@ -143,8 +215,8 @@ function bookmarks.place()
end
end
end
- bookmarks.finalize(levels)
end
+ bookmarks.finalize(levels)
function bookmarks.place() end -- prevent second run
end
end
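
For reference, a hedged sketch (made-up titles and internals) of the entry shape that bookmarks.finalize receives from the rewritten placer: with more than one section block, each block contributes a top-level entry and the structural levels below it shift down by one:

    -- hypothetical data, only to show the { level, title, references, opened } shape
    local levels = {
        { 1, "Bodymatter",        { internal = 21, realpage = 5 }, false }, -- block entry
        { 2, "1 First chapter",   { internal = 22, realpage = 5 }, false }, -- chapter, one level deeper
        { 3, "1.1 First section", { internal = 23, realpage = 6 }, false },
    }
    bookmarks.finalize(levels)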
diff --git a/Master/texmf-dist/tex/context/base/strc-bkm.mkiv b/Master/texmf-dist/tex/context/base/strc-bkm.mkiv
index 8e1252329e4..9d2ebd796c6 100644
--- a/Master/texmf-dist/tex/context/base/strc-bkm.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-bkm.mkiv
@@ -55,8 +55,9 @@
\installsetuponlycommandhandler \??bookmark {bookmark} % installdirectparametersethandler
\setupbookmark
- [\c!force=\v!no, % it's easier to force that to inhibit
- \c!number=\v!yes] % might become v!no
+ [\c!force=\v!no, % it's easier to force that to inhibit
+ \c!number=\v!yes, % might become v!no
+ \c!sectionblock=\v!no] % show sectionblock level + title
\let\setupbookmarks\setupbookmark
@@ -117,11 +118,12 @@
\appendtoks
\ctxcommand{setupbookmarks {
- separatorset = "\bookmarkparameter\c!numberseparatorset",
- conversionset = "\bookmarkparameter\c!numberconversionset",
- starter = \!!bs\bookmarkparameter\c!numberstarter\!!es,
- stopper = \!!bs\bookmarkparameter\c!numberstopper\!!es,
- segments = "\bookmarkparameter\c!numbersegments",
+ separatorset = "\bookmarkparameter\c!numberseparatorset",
+ conversionset = "\bookmarkparameter\c!numberconversionset",
+ starter = \!!bs\bookmarkparameter\c!numberstarter\!!es,
+ stopper = \!!bs\bookmarkparameter\c!numberstopper\!!es,
+ segments = "\bookmarkparameter\c!numbersegments",
+ showblocktitle = "\bookmarkparameter\c!sectionblock",
}}%
\to \everysetupbookmark
diff --git a/Master/texmf-dist/tex/context/base/strc-blk.lua b/Master/texmf-dist/tex/context/base/strc-blk.lua
index 791f8f99b4c..ce3304d59af 100644
--- a/Master/texmf-dist/tex/context/base/strc-blk.lua
+++ b/Master/texmf-dist/tex/context/base/strc-blk.lua
@@ -13,7 +13,10 @@ local find, format, validstring = string.find, string.format, string.valid
local settings_to_set, settings_to_array = utilities.parsers.settings_to_set, utilities.parsers.settings_to_array
local allocate = utilities.storage.allocate
-local structures, context = structures, context
+local context = context
+local commands = commands
+
+local structures = structures
structures.blocks = structures.blocks or { }
@@ -75,7 +78,7 @@ end
function blocks.select(state,name,tag,criterium)
criterium = criterium or "text"
- if find(tag,"=") then tag = "" end
+ if find(tag,"=",1,true) then tag = "" end
local names = settings_to_set(name)
local all = tag == ""
local tags = not all and settings_to_set(tag)
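
Passing the extra 1,true arguments turns string.find into a plain substring search, which is faster and immune to pattern magic characters; a small illustration (not from the patch):

    -- hedged illustration of the plain-find flag
    local tag = "width=3cm"
    print(string.find(tag,"=",1,true))     -- 6 6 : plain substring search, no pattern interpretation
    print(string.find("a.b","."))          -- 1 1 : "." is magic and matches the first character
    print(string.find("a.b",".",1,true))   -- 2 2 : the literal dot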
diff --git a/Master/texmf-dist/tex/context/base/strc-con.mkvi b/Master/texmf-dist/tex/context/base/strc-con.mkvi
index 11cd31983c2..1862b00a6fd 100644
--- a/Master/texmf-dist/tex/context/base/strc-con.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-con.mkvi
@@ -246,6 +246,24 @@
\newconditional\c_strc_constructions_distance_none
+\def\strc_constructions_set_width_and_distance
+ {\assignwidth
+ \p_strc_constructions_width
+ \constructionsheadwidth
+ {\unhcopy\constructionheadbox}
+ \constructionsheaddistance}
+
+\def\strc_constructions_preroll_head#content%
+ {\setbox\constructionheadbox\hbox
+ {\forgetall
+ \dontcomplain
+ \settrialtypesetting
+ \csname\??constructionmainhandler\currentconstructionhandler\endcsname#content}}
+
+\def\strc_constructions_ignore_head
+ {\constructionsheaddistance\zeropoint
+ \constructionsheadwidth \zeropoint}
+
\unexpanded\setvalue{\??constructionstarthandler\v!construction}% this will be redone (reorganized) .. too much boxing
{\dostarttagged\t!construction\currentconstruction
\dotagsetconstruction
@@ -283,21 +301,34 @@
\fi
% inefficient and not always needed, for instance not with margins so we will make checkers
% per alternative some day (especially in labels this is unwanted overhead)
- \setbox\constructionheadbox\hbox
- {\forgetall
- \dontcomplain
- \settrialtypesetting
- \edef\p_strc_constructions_sample{\constructionparameter\c!sample}%
- \ifx\p_strc_constructions_sample\empty
- \csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructiontext
- \else
- \csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructionsample
- \fi}%
- \assignwidth
- \p_strc_constructions_width
- \constructionsheadwidth
- {\unhcopy\constructionheadbox}
- \constructionsheaddistance
+ %
+ % maybe we should have an option as I cannot foresee all the consequences now
+ %
+ % \edef\p_strc_constructions_sample{\constructionparameter\c!sample}%
+ % \ifx\p_strc_constructions_sample\empty
+ % \strc_constructions_preroll_head\currentconstructiontext
+ % \else
+ %     \strc_constructions_preroll_head\currentconstructionsample
+ % \fi
+ % \strc_constructions_set_width_and_distance
+ %
+ \strc_constructions_preroll_head\currentconstructiontext
+ \ifzeropt\wd\constructionheadbox
+ \strc_constructions_ignore_head
+ \else
+ \edef\p_strc_constructions_sample{\constructionparameter\c!sample}%
+ \ifx\p_strc_constructions_sample\empty
+ \strc_constructions_set_width_and_distance
+ \else
+ \strc_constructions_preroll_head\currentconstructionsample
+ \ifzeropt\wd\constructionheadbox
+ \strc_constructions_ignore_head
+ \else
+ \strc_constructions_set_width_and_distance
+ \fi
+ \fi
+ \fi
+ %
\dostarttagged\t!constructiontag\empty % todo
\setbox\constructionheadbox\hbox
{\forgetall
@@ -709,7 +740,8 @@
\startsetups[\??constructionrenderings:\v!serried:\v!fit]
\let\\=\crlf
\noindent
- \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox
+ \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover?
+ \penalty\plustenthousand % new
\hskip\constructionsheaddistance\relax
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -718,8 +750,9 @@
\startsetups[\??constructionrenderings:\v!serried:\v!broad]
\let\\=\crlf
\noindent
- \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox
+ \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover?
\ifconditional\c_strc_constructions_distance_none \else
+ \penalty\plustenthousand % new
\hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
\fi
\useconstructionstyleandcolor\c!style\c!color
@@ -947,10 +980,11 @@
}
}\relax
% \writestatus{constructions}{registering \currentconstruction: \number\scratchcounter}%
+ \ctxcommand{setinternalreference("\referenceprefix","\currentconstructionreference",\nextinternalreference,"\interactionparameter\c!focus")}%
\normalexpanded{%
\endgroup
\edef\noexpand\currentconstructionlistentry {\the\scratchcounter}%
- \edef\noexpand\currentconstructionattribute {\ctxcommand {setinternalreference("\referenceprefix","\currentconstructionreference",\nextinternalreference,"\interactionparameter\c!focus")}}%
+ \edef\noexpand\currentconstructionattribute {\the\lastdestinationattribute}%
\edef\noexpand\currentconstructionsynchronize{\ctxlatecommand{enhancelist(\the\scratchcounter)}}%
}%
\fi}
@@ -960,7 +994,7 @@
% macros.
\def\reinstateconstructionnumberentry#1% was xdef
- {\edef\currentconstructionattribute {\ctxcommand {getinternalreference(#1)}}%
+ {\edef\currentconstructionattribute {\ctxcommand {getinternallistreference(#1)}}%
\edef\currentconstructionsynchronize{\ctxlatecommand{enhancelist(#1)}}}
\installstructurelistprocessor{construction}{\usestructurelistprocessor{number+title}}
diff --git a/Master/texmf-dist/tex/context/base/strc-des.mkvi b/Master/texmf-dist/tex/context/base/strc-des.mkvi
index 5635ff0df58..fa20d3caea9 100644
--- a/Master/texmf-dist/tex/context/base/strc-des.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-des.mkvi
@@ -102,7 +102,7 @@
\unexpanded\def\strc_descriptions_start#1%
{\begingroup
\strc_constructions_initialize{#1}%
- \doifnextoptionalelse\strc_descriptions_start_yes\strc_descriptions_start_nop}
+ \doifnextoptionalcselse\strc_descriptions_start_yes\strc_descriptions_start_nop}
\unexpanded\def\strc_descriptions_start_yes[#1]%
{\doifassignmentelse{#1}\strc_descriptions_start_yes_assignment\strc_descriptions_start_yes_reference[#1]}
@@ -162,7 +162,7 @@
\unexpanded\def\strc_descriptions_command#1%
{\begingroup
\strc_constructions_initialize{#1}%
- \doifnextoptionalelse\strc_descriptions_yes\strc_descriptions_nop}
+ \doifnextoptionalcselse\strc_descriptions_yes\strc_descriptions_nop}
\unexpanded\def\strc_descriptions_yes
{\ifconditional\c_strc_constructions_title_state
@@ -239,7 +239,7 @@
\c!style=\v!normal,
\c!color=,
\c!width=\v!broad,
- \c!sample=\hskip4\emwidth,
+    \c!sample=\kern4\emwidth, % was \hskip but that is no longer okay
\c!before=,
\c!after=]
diff --git a/Master/texmf-dist/tex/context/base/strc-doc.lua b/Master/texmf-dist/tex/context/base/strc-doc.lua
index 50a9e67a086..38830a4e77c 100644
--- a/Master/texmf-dist/tex/context/base/strc-doc.lua
+++ b/Master/texmf-dist/tex/context/base/strc-doc.lua
@@ -41,9 +41,10 @@ local trace_detail = false trackers.register("structures.detail", fu
local report_structure = logs.reporter("structure","sectioning")
-local structures = structures
local context = context
+local commands = commands
+local structures = structures
local helpers = structures.helpers
local documents = structures.documents
local sections = structures.sections
@@ -60,6 +61,10 @@ local strippedprocessor = processors.stripped
local a_internal = attributes.private('internal')
+local ctx_convertnumber = context.convertnumber
+local ctx_sprint = context.sprint
+local ctx_finalizeauto = context.finalizeautostructurelevel
+
-- -- -- document -- -- --
local data -- the current state
@@ -170,15 +175,16 @@ end)
--
-sections.levelmap = sections.levelmap or { }
+sections.verbose = true
-local levelmap = sections.levelmap
-
-storage.register("structures/sections/levelmap", sections.levelmap, "structures.sections.levelmap")
+local sectionblockdata = sections.sectionblockdata or { }
+sections.sectionblockdata = sectionblockdata
-sections.verbose = true
+local levelmap = sections.levelmap or { }
+sections.levelmap = levelmap
+levelmap.block = -1
-levelmap.block = -1
+storage.register("structures/sections/levelmap", sections.levelmap, "structures.sections.levelmap")
function sections.setlevel(name,level) -- level can be number or parent (=string)
local l = tonumber(level)
@@ -196,17 +202,21 @@ function sections.getlevel(name)
return levelmap[name] or 0
end
-function sections.setblock(name)
+table.setmetatableindex(sectionblockdata,"table")
+
+function sections.setblock(name,settings)
local block = name or data.block or "unknown" -- can be used to set the default
data.block = block
+ sectionblockdata[block] = settings
return block
end
-function sections.pushblock(name)
+function sections.pushblock(name,settings)
counters.check(0) -- we assume sane usage of \page between blocks
local block = name or data.block
data.blocks[#data.blocks+1] = block
data.block = block
+ sectionblockdata[block] = settings
documents.reset()
return block
end
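
The table.setmetatableindex(sectionblockdata,"table") call in this hunk relies on the ConTeXt helper that creates a subtable on first access; a hedged, simplified sketch of that behaviour (not the real helper):

    -- simplified sketch of the "table" autovivification mode
    local function setmetatableindex(t,mode)
        if mode == "table" then
            setmetatable(t, { __index = function(t,k) local v = { } t[k] = v return v end })
        end
        return t
    end

    local blockdata = setmetatableindex({ },"table")
    blockdata.bodypart.bookmark = "Body matter"   -- no explicit initialization of blockdata.bodypart needed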
@@ -233,7 +243,7 @@ end
local saveset = { } -- experiment, see sections/tricky-001.tex
-function sections.somelevel(given)
+function sections.setentry(given)
-- old number
local numbers = data.numbers
@@ -450,7 +460,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
local data = data.status[depth]
local d
if data then
- if find(key,"%.") then
+ if find(key,".",1,true) then
d = accesstable(key,data)
else
d = data.titledata
@@ -462,7 +472,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
local metadata = data.metadata
local catcodes = metadata and metadata.catcodes
if catcodes then
- context.sprint(catcodes,d)
+ ctx_sprint(catcodes,d)
else
context(d)
end
@@ -471,7 +481,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
else
local catcodes = catcodenumbers[honorcatcodetable]
if catcodes then
- context.sprint(catcodes,d)
+ ctx_sprint(catcodes,d)
else
context(d)
end
@@ -506,14 +516,20 @@ function sections.current()
return data.status[data.depth]
end
-function sections.depthnumber(n)
+local function depthnumber(n)
local depth = data.depth
if not n or n == 0 then
n = depth
elseif n < 0 then
n = depth + n
end
- return context(data.numbers[n] or 0)
+ return data.numbers[n] or 0
+end
+
+sections.depthnumber = depthnumber
+
+function commands.depthnumber(n)
+ return context(depthnumber(n))
end
function sections.autodepth(numbers)
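
The split above follows a pattern used throughout these files: a pure Lua function that returns a value, plus a thin commands.* wrapper that pipes the result to TeX; a hedged usage sketch (hypothetical call site):

    -- hedged usage sketch enabled by the split
    local parent = sections.depthnumber(-1)   -- plain Lua number, no typesetting side effect
    if parent > 0 then
        context(parent)                       -- only the command/wrapper layer talks to TeX
    end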
@@ -574,11 +590,11 @@ local function process(index,numbers,ownnumbers,criterium,separatorset,conversio
if ownnumber ~= "" then
applyprocessor(ownnumber)
elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
- context.convertnumber(conversion,number)
+ ctx_convertnumber(conversion,number)
else
local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
local data = startapplyprocessor(theconversion)
- context.convertnumber(data or "numbers",number)
+ ctx_convertnumber(data or "numbers",number)
stopapplyprocessor()
end
end
@@ -736,13 +752,13 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
applyprocessor(connector)
end
else
-if groupsuffix and kind ~= "prefix" then
- if result then
- result[#result+1] = strippedprocessor(groupsuffix)
- else
- applyprocessor(groupsuffix)
- end
-end
+ if groupsuffix and kind ~= "prefix" then
+ if result then
+ result[#result+1] = strippedprocessor(groupsuffix)
+ else
+ applyprocessor(groupsuffix)
+ end
+ end
if stopper then
if result then
result[#result+1] = strippedprocessor(stopper)
@@ -768,94 +784,104 @@ end
function sections.findnumber(depth,what) -- needs checking (looks wrong and slow too)
local data = data.status[depth or data.depth]
- if data then
- local index = data.references.section
- local collected = sections.collected
- local sectiondata = collected[index]
- if sectiondata and sectiondata.hidenumber ~= true then -- can be nil
- local quit = what == v_previous or what == v_next
- if what == v_first or what == v_previous then
- for i=index,1,-1 do
- local s = collected[i]
- if s then
- local n = s.numbers
- if #n == depth and n[depth] and n[depth] ~= 0 then
- sectiondata = s
- if quit then
- break
- end
- elseif #n < depth then
+ if not data then
+ return
+ end
+ local references = data.references
+ if not references then
+ return
+ end
+ local index = references.section
+ local collected = sections.collected
+ local sectiondata = collected[index]
+ if sectiondata and sectiondata.hidenumber ~= true then -- can be nil
+ local quit = what == v_previous or what == v_next
+ if what == v_first or what == v_previous then
+ for i=index,1,-1 do
+ local s = collected[i]
+ if s then
+ local n = s.numbers
+ if #n == depth and n[depth] and n[depth] ~= 0 then
+ sectiondata = s
+ if quit then
break
end
+ elseif #n < depth then
+ break
end
end
- elseif what == v_last or what == v_next then
- for i=index,#collected do
- local s = collected[i]
- if s then
- local n = s.numbers
- if #n == depth and n[depth] and n[depth] ~= 0 then
- sectiondata = s
- if quit then
- break
- end
- elseif #n < depth then
+ end
+ elseif what == v_last or what == v_next then
+ for i=index,#collected do
+ local s = collected[i]
+ if s then
+ local n = s.numbers
+ if #n == depth and n[depth] and n[depth] ~= 0 then
+ sectiondata = s
+ if quit then
break
end
+ elseif #n < depth then
+ break
end
end
end
- return sectiondata
end
+ return sectiondata
end
end
function sections.finddata(depth,what)
local data = data.status[depth or data.depth]
- if data then
- -- if sectiondata and sectiondata.hidenumber ~= true then -- can be nil
- local index = data.references.listindex
- if index then
- local collected = structures.lists.collected
- local quit = what == v_previous or what == v_next
- if what == v_first or what == v_previous then
- for i=index-1,1,-1 do
- local s = collected[i]
- if not s then
+ if not data then
+ return
+ end
+ local references = data.references
+ if not references then
+ return
+ end
+ local index = references.listindex
+ if not index then
+ return
+ end
+ local collected = structures.lists.collected
+ local quit = what == v_previous or what == v_next
+ if what == v_first or what == v_previous then
+ for i=index-1,1,-1 do
+ local s = collected[i]
+ if not s then
+ break
+ elseif s.metadata.kind == "section" then -- maybe check on name
+ local n = s.numberdata.numbers
+ if #n == depth and n[depth] and n[depth] ~= 0 then
+ data = s
+ if quit then
break
- elseif s.metadata.kind == "section" then -- maybe check on name
- local n = s.numberdata.numbers
- if #n == depth and n[depth] and n[depth] ~= 0 then
- data = s
- if quit then
- break
- end
- elseif #n < depth then
- break
- end
end
+ elseif #n < depth then
+ break
end
- elseif what == v_last or what == v_next then
- for i=index+1,#collected do
- local s = collected[i]
- if not s then
+ end
+ end
+ elseif what == v_last or what == v_next then
+ for i=index+1,#collected do
+ local s = collected[i]
+ if not s then
+ break
+ elseif s.metadata.kind == "section" then -- maybe check on name
+ local n = s.numberdata.numbers
+ if #n == depth and n[depth] and n[depth] ~= 0 then
+ data = s
+ if quit then
break
- elseif s.metadata.kind == "section" then -- maybe check on name
- local n = s.numberdata.numbers
- if #n == depth and n[depth] and n[depth] ~= 0 then
- data = s
- if quit then
- break
- end
- elseif #n < depth then
- break
- end
end
+ elseif #n < depth then
+ break
end
end
end
- return data
end
+ return data
end
function sections.internalreference(sectionname,what) -- to be used in pagebuilder (no marks used)
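
The rewrite of findnumber and finddata replaces the deeply nested if-blocks with guard clauses that return early when data, references or listindex are missing; a minimal hedged sketch of the same style (generic names, not the real accessors):

    -- hedged sketch of the guard-clause style
    local function findentry(depth)
        local entry = data.status[depth or data.depth]
        if not entry then
            return                   -- nothing at this depth
        end
        local references = entry.references
        if not references then
            return                   -- incomplete entry, nothing to search
        end
        -- the actual lookup only runs once both guards have passed
        return references.section
    end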
@@ -910,7 +936,7 @@ function commands.autonextstructurelevel(level)
else
for i=level,#levels do
if levels[i] then
- context.finalizeautostructurelevel()
+ ctx_finalizeauto()
levels[i] = false
end
end
@@ -921,7 +947,7 @@ end
function commands.autofinishstructurelevels()
for i=1,#levels do
if levels[i] then
- context.finalizeautostructurelevel()
+ ctx_finalizeauto()
end
end
levels = { }
@@ -929,8 +955,8 @@ end
-- interface (some are actually already commands, like sections.fullnumber)
-commands.structurenumber = function() sections.fullnumber() end
-commands.structuretitle = function() sections.title () end
+commands.structurenumber = sections.fullnumber
+commands.structuretitle = sections.title
commands.structurevariable = function(name) sections.structuredata(nil,name) end
commands.structureuservariable = function(name) sections.userdata (nil,name) end
@@ -938,15 +964,23 @@ commands.structurecatcodedget = function(name) sections.structured
commands.structuregivencatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
commands.structureautocatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
-commands.namedstructurevariable = function(depth,name) sections.structuredata(depth,name) end
-commands.namedstructureuservariable = function(depth,name) sections.userdata (depth,name) end
+commands.namedstructurevariable = sections.structuredata
+commands.namedstructureuservariable = sections.userdata
---
+commands.setsectionlevel = sections.setlevel
+commands.setsectionnumber = sections.setnumber
+commands.getsectionnumber = sections.getnumber
+commands.getfullsectionnumber = sections.fullnumber
+commands.getstructuredata = sections.structuredata
+commands.getcurrentsectionlevel = sections.getcurrentlevel
-function commands.setsectionblock (name) context(sections.setblock(name)) end
-function commands.pushsectionblock(name) context(sections.pushblock(name)) end
-function commands.popsectionblock () context(sections.popblock()) end
+commands.setsectionblock = sections.setblock
+commands.pushsectionblock = sections.pushblock
+commands.popsectionblock = sections.popblock
+commands.registersection = sections.register
+commands.setsectionentry = sections.setentry
+commands.reportstructure = sections.reportstructure
--
local byway = "^" .. v_by -- ugly but downward compatible
diff --git a/Master/texmf-dist/tex/context/base/strc-doc.mkiv b/Master/texmf-dist/tex/context/base/strc-doc.mkiv
index c8dfae1e422..98abfd6118e 100644
--- a/Master/texmf-dist/tex/context/base/strc-doc.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-doc.mkiv
@@ -20,7 +20,8 @@
%D This will move:
\unexpanded\def\setstructuresynchronization#1% todo: use ctxcontext
- {\xdef\currentstructureattribute {\ctxlua {tex.write(structures.references.setinternalreference("\currentstructurereferenceprefix","\currentstructurereference",\nextinternalreference,"\interactionparameter\c!focus"))}}%
+ {\ctxcommand{setinternalreference("\currentstructurereferenceprefix","\currentstructurereference",\nextinternalreference,"\interactionparameter\c!focus")}%
+ \xdef\currentstructureattribute {\the\lastdestinationattribute}%
\xdef\currentstructuresynchronize{\ctxlatecommand{enhancelist(#1)}}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-enu.mkvi b/Master/texmf-dist/tex/context/base/strc-enu.mkvi
index e369bc2e142..0a01d2637f5 100644
--- a/Master/texmf-dist/tex/context/base/strc-enu.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-enu.mkvi
@@ -370,6 +370,6 @@
\fi}
\unexpanded\def\strc_enumerations_skip_number_coupling[#tag]% e.g. for questions with no answer
- {\ctxlua{structures.references.setnextorder("construction","#tag")}}
+ {\ctxcommand{setnextreferenceorder("construction","#tag")}}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-flt.mkvi b/Master/texmf-dist/tex/context/base/strc-flt.mkvi
index 8122b953e2f..a939213177d 100644
--- a/Master/texmf-dist/tex/context/base/strc-flt.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-flt.mkvi
@@ -983,6 +983,8 @@
{\let\extrafloatlocation#rightpagelocation}%
{\let\extrafloatlocation#leftpagelocation}}
+\let\extrafloatlocation\empty
+
\installcorenamespace{extrafloataction}
\setvalue{\??extrafloataction \v!inner}#1{\strc_floats_set_extra_action\v!left \v!right}
@@ -1007,6 +1009,7 @@
\processcommacommand[\floatlocation]\strc_floats_check_extra_actions_step
\ifx\extrafloatlocation\empty \else
\edef\floatlocation{\extrafloatlocation,\floatlocation}%
+ \setfloatmethodvariables\floatlocation
\fi}}
\def\strc_floats_check_extra_actions_step#step%
@@ -2191,7 +2194,7 @@
\else
\directlocalfloatsparameter\c!inbetween
\fi
- \dontleavehmode\hbox{\foundbox\??localfloatstack\recurselevel}%
+ \dontleavehmode\hbox{\foundbox\??localfloatstack\recurselevel}% \restorebox...
\ifnum\recurselevel=\c_strc_localfloats_n\relax
\directlocalfloatsparameter\c!after
\fi}}
@@ -2200,8 +2203,11 @@
{\getlocalfloats
\resetlocalfloats}
+% \unexpanded\def\getlocalfloat#1%
+% {\normalexpanded{\foundbox{\??localfloatstack}{\number#1}}}% \vbox{\restorebox...}
+
\unexpanded\def\getlocalfloat#1%
- {\normalexpanded{\foundbox{\??localfloatstack}{\number#1}}}
+ {\foundbox\??localfloatstack{\number#1}} % \vbox{\restorebox...}
\unexpanded\def\forcelocalfloats
{\let\forcedfloatmethod\v!local}
diff --git a/Master/texmf-dist/tex/context/base/strc-ini.lua b/Master/texmf-dist/tex/context/base/strc-ini.lua
index fd7c10f7952..a48679e6fdf 100644
--- a/Master/texmf-dist/tex/context/base/strc-ini.lua
+++ b/Master/texmf-dist/tex/context/base/strc-ini.lua
@@ -20,12 +20,13 @@ but it does not make sense to store all processdata.
]]--
-local formatters = string.formatters
local lpegmatch = lpeg.match
-local count = tex.count
local type, next, tonumber, select = type, next, tonumber, select
-local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash
-local allocate = utilities.storage.allocate
+
+local formatters = string.formatters
+local settings_to_array = utilities.parsers.settings_to_array
+local settings_to_hash = utilities.parsers.settings_to_hash
+local allocate = utilities.storage.allocate
local catcodenumbers = catcodes.numbers -- better use the context(...) way to switch
@@ -34,16 +35,22 @@ local xmlcatcodes = catcodenumbers.xmlcatcodes
local notcatcodes = catcodenumbers.notcatcodes
local txtcatcodes = catcodenumbers.txtcatcodes
-local context, commands = context, commands
-
-local pushcatcodes = context.pushcatcodes
-local popcatcodes = context.popcatcodes
+local context = context
+local commands = commands
local trace_processors = false
local report_processors = logs.reporter("processors","structure")
trackers.register("typesetters.processors", function(v) trace_processors = v end)
+local xmlconvert = lxml.convert
+local xmlstore = lxml.store
+
+local ctx_pushcatcodes = context.pushcatcodes
+local ctx_popcatcodes = context.popcatcodes
+local ctx_xmlsetup = context.xmlsetup
+local ctx_xmlprocessbuffer = context.xmlprocessbuffer
+
-- -- -- namespace -- -- --
-- This is tricky: we have stored and initialized already some of
@@ -149,11 +156,17 @@ local function simplify(d,nodefault)
for k, v in next, d do
local tv = type(v)
if tv == "table" then
- if next(v) then t[k] = simplify(v) end
+ if next(v) then
+ t[k] = simplify(v)
+ end
elseif tv == "string" then
- if v ~= "" and v ~= "default" then t[k] = v end
+ if v ~= "" and v ~= "default" then
+ t[k] = v
+ end
elseif tv == "boolean" then
- if v then t[k] = v end
+ if v then
+ t[k] = v
+ end
else
t[k] = v
end
@@ -166,6 +179,34 @@ local function simplify(d,nodefault)
end
end
+-- we only care about the tuc file so this would do too:
+--
+-- local function simplify(d,nodefault)
+-- if d then
+-- for k, v in next, d do
+-- local tv = type(v)
+-- if tv == "string" then
+-- if v == "" or v == "default" then
+-- d[k] = nil
+-- end
+-- elseif tv == "table" then
+-- if next(v) then
+-- simplify(v)
+-- end
+-- elseif tv == "boolean" then
+-- if not v then
+-- d[k] = nil
+-- end
+-- end
+-- end
+-- return d
+-- elseif nodefault then
+-- return nil
+-- else
+-- return { }
+-- end
+-- end
+
helpers.simplify = simplify
function helpers.merged(...)
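
A hedged example (made-up data) of what simplify does to an entry before it is saved to the utility file: empty strings, "default" values and false booleans are dropped, nested tables are pruned recursively:

    -- hypothetical input/output pair for helpers.simplify
    local entry = {
        title    = "Just a test",
        style    = "",            -- empty string: dropped
        location = "default",     -- default value: dropped
        hidden   = false,         -- false boolean: dropped
        numbers  = { 1, 2 },      -- non-empty table: kept (and simplified recursively)
    }
    local saved = helpers.simplify(entry)
    -- saved is { title = "Just a test", numbers = { 1, 2 } }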
@@ -209,19 +250,19 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
report_processors("putting xml data in buffer: %s",xmldata)
report_processors("processing buffer with setup %a and tag %a",xmlsetup,tag)
end
- if experiment then
- -- the question is: will this be forgotten ... better store in a via file
- local xmltable = lxml.convert("temp",xmldata or "")
- lxml.store("temp",xmltable)
- context.xmlsetup("temp",xmlsetup or "")
- else
- context.xmlprocessbuffer("dummy",tag,xmlsetup or "")
- end
+ if experiment then
+ -- the question is: will this be forgotten ... better store in a via file
+ local xmltable = xmlconvert("temp",xmldata or "")
+ xmlstore("temp",xmltable)
+ ctx_xmlsetup("temp",xmlsetup or "")
+ else
+ ctx_xmlprocessbuffer("dummy",tag,xmlsetup or "")
+ end
elseif xmlsetup then -- title is reference to node (so \xmlraw should have been used)
if trace_processors then
report_processors("feeding xmlsetup %a using node %a",xmlsetup,title)
end
- context.xmlsetup(title,metadata.xmlsetup)
+ ctx_xmlsetup(title,metadata.xmlsetup)
else
local catcodes = metadata.catcodes
if catcodes == notcatcodes or catcodes == xmlcatcodes then
@@ -239,9 +280,9 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
-- doesn't work when a newline is in there \section{Test\ A} so we do
-- it this way:
--
- pushcatcodes(catcodes)
+ ctx_pushcatcodes(catcodes)
context(title)
- popcatcodes()
+ ctx_popcatcodes()
end
end
else
diff --git a/Master/texmf-dist/tex/context/base/strc-itm.lua b/Master/texmf-dist/tex/context/base/strc-itm.lua
index 8a745f356fb..4945c282fba 100644
--- a/Master/texmf-dist/tex/context/base/strc-itm.lua
+++ b/Master/texmf-dist/tex/context/base/strc-itm.lua
@@ -10,19 +10,19 @@ local structures = structures
local itemgroups = structures.itemgroups
local jobpasses = job.passes
-local setfield = jobpasses.save
-local getfield = jobpasses.getfield
+local setvariable = jobpasses.save
+local getvariable = jobpasses.getfield
function itemgroups.register(name,nofitems,maxwidth)
- setfield("itemgroup", { nofitems, maxwidth })
+ setvariable("itemgroup", { nofitems, maxwidth })
end
function itemgroups.nofitems(name,index)
- return getfield("itemgroup", index, 1, 0)
+ return getvariable("itemgroup", index, 1, 0)
end
function itemgroups.maxwidth(name,index)
- return getfield("itemgroup", index, 2, 0)
+ return getvariable("itemgroup", index, 2, 0)
end
-- interface (might become counter/dimension)
@@ -30,9 +30,9 @@ end
commands.registeritemgroup = itemgroups.register
function commands.nofitems(name,index)
- context(getfield("itemgroup", index, 1, 0))
+ context(getvariable("itemgroup", index, 1, 0))
end
function commands.maxitemwidth(name,index)
- context(getfield("itemgroup", index, 2, 0))
+ context(getvariable("itemgroup", index, 2, 0))
end
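
The renamed helpers still implement the same two-pass round trip: one run records, per itemgroup, the number of items and the widest item tag, and a later run reads those values back; a hedged sketch of that flow (arguments are illustrative only):

    -- hedged sketch of the two-pass data flow
    itemgroups.register("itemize",3,786432)      -- first pass: save { nofitems, maxwidth }
    -- a later pass fetches the values back by index:
    local n = itemgroups.nofitems("itemize",1)   -- 3, or the 0 fallback when not yet known
    local w = itemgroups.maxwidth("itemize",1)   -- 786432, or the 0 fallback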
diff --git a/Master/texmf-dist/tex/context/base/strc-itm.mkvi b/Master/texmf-dist/tex/context/base/strc-itm.mkvi
index 85ec4bc456d..098b863b95a 100644
--- a/Master/texmf-dist/tex/context/base/strc-itm.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-itm.mkvi
@@ -331,7 +331,7 @@
\def\strc_itemgroups_store_continue_state#options#settings%
{\setxvalue{\??itemgroupoption \currentitemgroup}{\strc_itemgroups_process_options{#options}}%
- \setgvalue{\??itemgroupsetting\currentitemgroup}{\setupcurrentitemgroup [#settings]}}
+ \setgvalue{\??itemgroupsetting\currentitemgroup}{\setupcurrentitemgroup[#settings]}}
\def\strc_itemgroups_fetch_continue_state
{\getvalue{\??itemgroupoption \currentitemgroup}%
@@ -408,7 +408,6 @@
\settrue\c_strc_itemgroups_inline
\settrue\c_strc_itemgroups_joined
\strc_itemgroups_process_set_option_pack}
-\setvalue{\??itemgroupkeyword\v!columns }{\strc_itemgroups_process_set_option_pack}
\setvalue{\??itemgroupkeyword\v!before }{\settrue\c_strc_itemgroups_before}
\setvalue{\??itemgroupkeyword\v!after }{\settrue\c_strc_itemgroups_after}
\setvalue{\??itemgroupkeyword\v!nowhite }{\settrue\c_strc_itemgroups_nowhite}
@@ -1010,8 +1009,21 @@
\strc_itemgroups_between_command
\fi}
-\unexpanded\def\strc_itemgroups_start_item[#1]% we can reuse more
- {\def\currentitemreference{#1}%
+% c_strc_itemgroups_concat:
+%
+% the problem is that we use leftskip so concat cannot reliably take the height into
+% account; it's .. rather tricky when white space is in there anyway (due to \par) .. so
+% we rely on a special blank method
+%
+% \startitemize[n]
+% \item bla
+% \item \startitemize[a]
+% \item bla $\displaystyle\int^{x^{y^4}}$ \item bla
+% \stopitemize
+% \stopitemize
+
+\unexpanded\def\strc_itemgroups_start_item[#reference]% we can reuse more
+ {\def\currentitemreference{#reference}%
\ifconditional\c_strc_itemgroups_text
% begin of item
\else
@@ -1027,10 +1039,12 @@
\strc_itemgroups_start_item_next
\fi
\ifconditional\c_strc_itemgroups_concat
- % \vskip-\dimexpr\lastskip+\lineheight\relax
- \vskip-\lastskip % we cannot use a \dimexpr here because
- \vskip-\lineheight % then we loose the stretch and shrink
- \nobreak
+ % \vskip-\lastskip % we cannot use a \dimexpr here because
+    % \vskip-\lineheight % then we lose the stretch and shrink
+ % \nobreak
+ %
+ \blank[\v!overlay]% new per 2014-03-27
+ %
\setfalse\c_strc_itemgroups_concat
\fi
\dostarttagged\t!item\empty
@@ -1205,7 +1219,8 @@
\def\strc_itemgroups_handle_lapped_item_positive
{\llap
- {\hbox to \d_strc_itemgroups_list_width
+ {\dontcomplain
+ \hbox to \d_strc_itemgroups_list_width
{\ifconditional\c_strc_itemgroups_sub
\llap{+\enspace}%
\fi
@@ -1246,7 +1261,7 @@
\strc_itemgroups_start_head}
\def\strc_itemgroups_make_symbol_box
- {\setbox\b_strc_itemgroups\hbox
+ {\setbox\b_strc_itemgroups\autodirhbox
{\ifconditional\c_strc_itemgroups_head
\ifconditional\c_strc_itemgroups_symbol
\strc_itemgroups_insert_extra_reference
@@ -1444,6 +1459,19 @@
\fi
+\relaxvalueifundefined \v!item
+\relaxvalueifundefined \v!sub
+\relaxvalueifundefined \v!sym
+\relaxvalueifundefined \v!ran
+\relaxvalueifundefined \v!head
+\relaxvalueifundefined \v!its
+\relaxvalueifundefined \v!mar
+\relaxvalueifundefined \v!txt
+\relaxvalueifundefined {\e!start\v!item}
+\relaxvalueifundefined {\e!stop \v!item}
+\relaxvalueifundefined {\e!start\v!head}
+\relaxvalueifundefined {\e!stop \v!head}
+
%D A nice example of a plugin:
%D
%D \startbuffer
diff --git a/Master/texmf-dist/tex/context/base/strc-lab.mkiv b/Master/texmf-dist/tex/context/base/strc-lab.mkiv
index ce4cdcc5ec6..3e661712632 100644
--- a/Master/texmf-dist/tex/context/base/strc-lab.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-lab.mkiv
@@ -58,10 +58,15 @@
{\normalexpanded{\defineconstruction[#1][#3][\s!handler=\v!label,\c!level=#2]}%
\setevalue{\??label#1:\s!parent}{\??label#3}}%
\ifconditional\c_strc_constructions_define_commands
- \setuevalue{\e!next #1}{\strc_labels_next {#1}{\number#2}}% obsolete
- \setuevalue{\c!reset#1}{\strc_labels_reset {#1}{\number#2}}% obsolete
- %setuevalue{\c!set #1}{\strc_labels_set {#1}{\number#2}}% obsolete
- \setuevalue {#1}{\strc_labels_command{#1}}%
+ \setuevalue{\e!next #1}{\strc_labels_next {#1}{\number#2}}% obsolete
+ \setuevalue{\v!reset #1}{\strc_labels_reset {#1}{\number#2}}% obsolete % should be \e!reset anyway
+ %setuevalue{\c!set #1}{\strc_labels_set {#1}{\number#2}}% obsolete
+ \ifcsname\v!current#1\endcsname
+ % we play safe
+ \else
+ \setuevalue{\v!current#1}{\strc_labels_current{#1}}% % obsolete % should be \e!current anyway
+ \fi
+ \setuevalue {#1}{\strc_labels_command{#1}}%
\fi}
% todo: \strc_labels_command for user
@@ -103,6 +108,8 @@
\let\p_strc_constructions_title \empty
\let\p_strc_constructions_number\empty
+\newconditional\c_strc_constructions_number_keep
+
\setvalue{\??constructioninitializer\v!label}%
{\let\currentlabel \currentconstruction
\let\constructionparameter \labelparameter
@@ -117,7 +124,9 @@
\iftrialtypesetting
\strc_counters_save\currentconstructionnumber
\fi
- \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+ \ifconditional\c_strc_constructions_number_keep \else
+ \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+ \fi
\else
\setfalse\c_strc_constructions_number_state
\fi
@@ -137,11 +146,12 @@
%D Interfaces:
-\let\strc_labels_command\strc_descriptions_command
+\unexpanded\def\strc_labels_command{\setfalse\c_strc_constructions_number_keep\strc_descriptions_command}
+\unexpanded\def\strc_labels_current{\settrue \c_strc_constructions_number_keep\strc_descriptions_command}
-\unexpanded\def\strc_labels_next {\strc_constructions_next_indeed \namedlabelparameter} % #1#2
-\unexpanded\def\strc_labels_reset{\strc_constructions_reset_indeed\namedlabelparameter} % #1#2
-%unexpanded\def\strc_labels_set {\strc_constructions_set_indeed \namedlabelparameter} % #1#2
+\unexpanded\def\strc_labels_next {\strc_constructions_next_indeed \namedlabelparameter} % #1#2
+\unexpanded\def\strc_labels_reset {\strc_constructions_reset_indeed\namedlabelparameter} % #1#2
+%unexpanded\def\strc_labels_set {\strc_constructions_set_indeed \namedlabelparameter} % #1#2
% similar to enumerations
diff --git a/Master/texmf-dist/tex/context/base/strc-lev.lua b/Master/texmf-dist/tex/context/base/strc-lev.lua
index 50a63c93879..947889e1e1c 100644
--- a/Master/texmf-dist/tex/context/base/strc-lev.lua
+++ b/Master/texmf-dist/tex/context/base/strc-lev.lua
@@ -8,6 +8,9 @@ if not modules then modules = { } end modules ['strc-lev'] = {
local insert, remove = table.insert, table.remove
+local context = context
+local commands = commands
+
local sections = structures.sections
local default = interfaces.variables.default
diff --git a/Master/texmf-dist/tex/context/base/strc-lnt.mkvi b/Master/texmf-dist/tex/context/base/strc-lnt.mkvi
index 4a2cd1cc0a3..f84521002e1 100644
--- a/Master/texmf-dist/tex/context/base/strc-lnt.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-lnt.mkvi
@@ -11,6 +11,8 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% todo: mkvi #
+
\writestatus{loading}{ConTeXt Structure Macros / Line Notes}
%D This module loads on top of the footnote and line numbering macros.
@@ -22,6 +24,7 @@
\let\setuplinenote\setupnote
\newcount\c_strc_linenotes
+\newtoks\everydefinelinenote
\unexpanded\def\definelinenote
{\dotripleempty\strc_linenotes_define}
@@ -42,10 +45,14 @@
\else
\definenote[#1]%
\fi\fi
- \expandafter\let\csname\??linenote#1\expandafter\endcsname\csname#1\endcsname % use copy command
- \setuevalue {#1}{\strc_linenotes_direct{#1}}%
- \setuevalue{\e!start#1}{\strc_linenotes_start {#1}}%
- \setuevalue{\e!stop #1}{\strc_linenotes_stop }%
+ \pushmacro\currentnote
+ \edef\currentnote{#1}
+ \expandafter\let\csname\??linenote\currentnote\expandafter\endcsname\csname\currentnote\endcsname % use copy command
+ \setuevalue {\currentnote}{\strc_linenotes_direct{\currentnote}}%
+ \setuevalue{\e!start\currentnote}{\strc_linenotes_start {\currentnote}}%
+ \setuevalue{\e!stop \currentnote}{\strc_linenotes_stop }%
+ \the\everydefinelinenote
+ \popmacro\currentnote
\fi}
\unexpanded\def\strc_linenotes_direct#1#2%
@@ -79,6 +86,8 @@
\letvalue\??linenotespreviousfrom\empty
\letvalue\??linenotespreviousto \empty
+% maybe do this in lua
+
\def\page_lines_in_from{\in[lr:b:\currentlinenotereference]}
\def\page_lines_in_to {\in[lr:e:\currentlinenotereference]}
@@ -109,26 +118,19 @@
\ifconditional\c_page_lines_current_to
\xdef\m_page_lines_current_to{\currentreferencelinenumber}%
\ifx\m_page_lines_previous_from\m_page_lines_current_from
- \ifx\m_page_lines_previous_to\m_page_lines_current_to
- \notationparameter\c!compressseparator
- \else
+ \ifx\m_page_lines_previous_to\m_page_lines_current_to \else
\page_lines_in_from
+ \ifx\m_page_lines_current_from\m_page_lines_current_to\else\endash\page_lines_in_to\fi
\fi
\else
\page_lines_in_from
- \ifx\m_page_lines_current_from\m_page_lines_current_to
- \else
- \endash
- \page_lines_in_to
- \fi
+ \ifx\m_page_lines_current_from\m_page_lines_current_to\else\endash\page_lines_in_to\fi
\fi
\else
\page_lines_in_from
\fi
\else
- \ifx\m_page_lines_previous_from\m_page_lines_current_from
- \notationparameter\c!compressseparator
- \else
+ \ifx\m_page_lines_previous_from\m_page_lines_current_from \else
\page_lines_in_from
\fi
\fi}
@@ -142,9 +144,7 @@
\edef\currentlinenotereference{#2}%
\xdef\m_page_lines_previous_from{\csname\??linenotespreviousfrom\ifcsname\??linenotespreviousfrom\currentnotation\endcsname\currentnotation\fi\endcsname}%
\xdef\m_page_lines_previous_to {\csname\??linenotespreviousto \ifcsname\??linenotespreviousto \currentnotation\endcsname\currentnotation\fi\endcsname}%
- \doifelse{\notationparameter\c!compress}\v!yes
- {\let\linenotelinenumber\strc_linenotes_range_sparse}%
- {\let\linenotelinenumber\strc_linenotes_range_normal}%
+ \strc_linenotes_check_compression
\let\currentnote\currentnotation
\letnotationparameter\c!numbercommand\linenotelinenumber% todo: deep hook
\letnoteparameter \c!textcommand \gobbleoneargument % todo: deep hook
@@ -153,6 +153,73 @@
\expandafter\glet\csname\??linenotespreviousto \currentnotation\endcsname\m_page_lines_current_to
\endgroup}
+% compression
+
+\installcorenamespace{linenotescompressmethod}
+
+% compress=yes|no
+% compressmethod=separator|stopper
+
+\def\c!compressdistance{compressdistance}
+\def\c!compressmethod {compressmethod}
+\def\c!compressstopper {compressstopper} % c
+\def\v!compressstopper {compressstopper} % v
+
+\def\v!separator {separator} % v
+
+\setvalue{\??linenotescompressmethod\v!separator}%
+ {\edef\p_compressseparator{\noteparameter\c!compressseparator}%
+ \scratchskip\noteparameter\c!compressdistance\relax
+ \ifx\p_compressseparator\empty
+ \hskip\scratchskip
+ \else
+ \hskip.5\scratchskip
+ \p_compressseparator
+ \hskip.5\scratchskip
+ \fi}
+
+\setvalue{\??linenotescompressmethod\v!stopper}%
+ {\edef\p_compressstopper{\noteparameter\c!compressstopper}%
+ \scratchskip\noteparameter\c!compressdistance\relax
+ \ifx\p_compressstopper\empty
+ \hskip\scratchskip
+ \else
+ \p_compressstopper
+ \hskip.5\scratchskip
+ \fi}
+
+\setvalue{\??linenotescompressmethod\v!space}%
+ {\hskip\noteparameter\c!compressdistance\relax}
+
+\def\strc_linenotes_check_compression
+% {\edef\p_linenotes_compress {\notationparameter\c!compress}%
+% \edef\p_linenotes_compressmethod{\notationparameter\c!compressmethod}%
+ {\edef\p_linenotes_compress {\noteparameter\c!compress}%
+ \edef\p_linenotes_compressmethod{\noteparameter\c!compressmethod}%
+ \ifx\p_linenotes_compress\v!yes
+ \let\linenotelinenumber\strc_linenotes_range_sparse
+ \else
+ \let\linenotelinenumber\strc_linenotes_range_normal
+ \fi
+ \ifcsname\??linenotescompressmethod\p_linenotes_compressmethod\endcsname \else
+ \let\p_linenotes_compressmethod\v!space
+ \fi}
+
+\def\strc_linenotes_inbetween % \ifcsname\??linenote\currentnote\expandafter\endcsname
+ {\csname\??linenotescompressmethod\p_linenotes_compressmethod\endcsname}
+
+\def\strc_notes_compress_distance{\emwidth \s!plus .5\emwidth \s!minus .25\emwidth}
+
+\setupnotes
+ [%c\compress=\v!no,
+ \c!compressdistance=\strc_notes_compress_distance,
+ \c!compressseparator=\symbol{\v!compressseparator},
+ \c!compressstopper=\symbol{\v!compressstopper}]
+
+\appendtoks
+ \letnoteparameter\c!inbetween\strc_linenotes_inbetween
+\to \everydefinelinenote
+
% where to hook this one in? resetcounter has no hook:
\unexpanded\def\doresetlinenotecompression#1% \strc_linenotes_reset_previous
@@ -161,11 +228,15 @@
\definesymbol
[\v!compressseparator]
- [\space\hbox{\vl\thinspace\vl}]
+ [\hbox{\vl\thinspace\vl}] % \space removed
-\setupnotations
- [%c\compress=\v!no,
- \c!compressseparator=\symbol\v!compressseparator]
+\definesymbol
+ [\v!compressstopper]
+ [,]
+
+% \setupnotations
+% [%c\compress=\v!no,
+% \c!compressseparator=\symbol\v!compressseparator]
\let\strc_linenotes_traced\gobbleoneargument
diff --git a/Master/texmf-dist/tex/context/base/strc-lst.lua b/Master/texmf-dist/tex/context/base/strc-lst.lua
index 305b6a6fa5b..16160e2735f 100644
--- a/Master/texmf-dist/tex/context/base/strc-lst.lua
+++ b/Master/texmf-dist/tex/context/base/strc-lst.lua
@@ -16,8 +16,7 @@ if not modules then modules = { } end modules ['strc-lst'] = {
-- move more to commands
local format, gmatch, gsub = string.format, string.gmatch, string.gsub
-local tonumber = tonumber
-local texcount = tex.count
+local tonumber, type = tonumber, type
local concat, insert, remove = table.concat, table.insert, table.remove
local lpegmatch = lpeg.match
local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash
@@ -27,6 +26,11 @@ local trace_lists = false trackers.register("structures.lists", function(
local report_lists = logs.reporter("structure","lists")
+local context = context
+local commands = commands
+
+local texgetcount = tex.getcount
+
local structures = structures
local lists = structures.lists
local sections = structures.sections
@@ -45,11 +49,14 @@ lists.collected = collected
lists.tobesaved = tobesaved
lists.enhancers = lists.enhancers or { }
-lists.internals = allocate(lists.internals or { }) -- to be checked
+-----.internals = allocate(lists.internals or { }) -- to be checked
lists.ordered = allocate(lists.ordered or { }) -- to be checked
lists.cached = cached
lists.pushed = pushed
+local sectionblocks = allocate()
+lists.sectionblocks = sectionblocks
+
references.specials = references.specials or { }
local variables = interfaces.variables
@@ -81,15 +88,25 @@ local function initializer()
local collected = lists.collected
local internals = checked(references.internals)
local ordered = lists.ordered
+ local usedinternals = references.usedinternals
+ local blockdone = { }
for i=1,#collected do
local c = collected[i]
local m = c.metadata
local r = c.references
if m then
-- access by internal reference
- local internal = r and r.internal
- if internal then
- internals[internal] = c
+ if r then
+ local internal = r.internal
+ if internal then
+ internals[internal] = c
+ usedinternals[internal] = r.used
+ end
+ local block = r.block
+ if block and not blockdone[block] then
+ blockdone[block] = true
+ sectionblocks[#sectionblocks+1] = block
+ end
end
-- access by order in list
local kind, name = m.kind, m.name
@@ -113,7 +130,22 @@ local function initializer()
end
end
-job.register('structures.lists.collected', tobesaved, initializer)
+local function finalizer()
+ local flaginternals = references.flaginternals
+ local usedviews = references.usedviews
+ for i=1,#tobesaved do
+ local r = tobesaved[i].references
+ if r then
+ local i = r.internal
+ local f = flaginternals[i]
+ if f then
+ r.used = usedviews[i] or true
+ end
+ end
+ end
+end
+
+job.register('structures.lists.collected', tobesaved, initializer, finalizer)
local groupindices = table.setmetatableindex("table")
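
The extra finalizer argument to job.register mirrors the initializer: the initializer runs when the utility file is loaded, the finalizer runs just before the data is saved again, which is where the used/usedviews flags get folded back into the references; a hedged sketch of the registration pattern (hypothetical module and key):

    -- hedged sketch of the initializer/finalizer pairing
    local tobesaved = { }

    local function initializer()
        -- runs after the utility (.tuc) file has been loaded: rebuild lookup tables
    end

    local function finalizer()
        -- runs just before saving: fold runtime state back into the stored entries
    end

    job.register("mymodule.collected", tobesaved, initializer, finalizer)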
@@ -122,24 +154,31 @@ function lists.groupindex(name,group)
return groupindex and groupindex[group] or 0
end
-function lists.addto(t)
+-- we could use t (as hash key) in order to check for dup entries
+
+function lists.addto(t) -- maybe more more here (saves parsing at the tex end)
local m = t.metadata
local u = t.userdata
if u and type(u) == "string" then
- t.userdata = helpers.touserdata(u) -- nicer at the tex end
+ t.userdata = helpers.touserdata(u)
end
local numberdata = t.numberdata
local group = numberdata and numberdata.group
+ local name = m.name
if not group then
-- forget about it
elseif group == "" then
group, numberdata.group = nil, nil
else
- local groupindex = groupindices[m.name][group]
+ local groupindex = groupindices[name][group]
if groupindex then
numberdata.numbers = cached[groupindex].numberdata.numbers
end
end
+ local setcomponent = references.setcomponent
+ if setcomponent then
+ setcomponent(t) -- can be inlined
+ end
local r = t.references
local i = r and r.internal or 0 -- brrr
local p = pushed[i]
@@ -149,12 +188,11 @@ function lists.addto(t)
pushed[i] = p
r.listindex = p
end
- local setcomponent = references.setcomponent
- if setcomponent then
- setcomponent(t) -- might move to the tex end
- end
if group then
- groupindices[m.name][group] = p
+ groupindices[name][group] = p
+ end
+ if trace_lists then
+ report_lists("added %a, internal %a",name,p)
end
return p
end
@@ -181,10 +219,17 @@ end
-- this is the main pagenumber enhancer
+local enhanced = { }
+
function lists.enhance(n)
- -- todo: symbolic names for counters
local l = cached[n]
- if l then
+ if not l then
+ report_lists("enhancing %a, unknown internal",n)
+ elseif enhanced[n] then
+ if trace_lists then
+ report_lists("enhancing %a, name %a, duplicate ignored",n,name)
+ end
+ else
local metadata = l.metadata
local references = l.references
--
@@ -192,23 +237,27 @@ function lists.enhance(n)
-- save in the right order (happens at shipout)
lists.tobesaved[#lists.tobesaved+1] = l
-- default enhancer (cross referencing)
- references.realpage = texcount.realpageno
+ references.realpage = texgetcount("realpageno")
-- tags
local kind = metadata.kind
local name = metadata.name
+ if trace_lists then
+ report_lists("enhancing %a, name %a",n,name)
+ end
if references then
-- is this used ?
local tag = tags.getid(kind,name)
if tag and tag ~= "?" then
references.tag = tag
end
- --~ references.listindex = n
end
-- specific enhancer (kind of obsolete)
local enhancer = kind and lists.enhancers[kind]
if enhancer then
enhancer(l)
end
+ --
+ enhanced[n] = true
return l
end
end
diff --git a/Master/texmf-dist/tex/context/base/strc-lst.mkvi b/Master/texmf-dist/tex/context/base/strc-lst.mkvi
index 15a499c8b15..0008f060231 100644
--- a/Master/texmf-dist/tex/context/base/strc-lst.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-lst.mkvi
@@ -126,7 +126,7 @@
{\endgroup}
\def\strc_lists_inject_yes[#settings][#userdata]%
- {\setupcurrentlist[\c!type=userdata,\c!location=\v!none,#settings]% grouped
+ {\setupcurrentlist[\c!type=userdata,\c!location=\v!none,#settings]% grouped (use \let...
\edef\p_location{\listparameter\c!location}%
\setnextinternalreference
\edef\currentlistnumber{\ctxcommand{addtolist{
@@ -147,8 +147,8 @@
\ifx\p_location\v!here
% this branch injects nodes !
\expanded{\ctxlatecommand{enhancelist(\currentlistnumber)}}%
- \ctxlua{structures.references.setinternalreference(nil,nil,\nextinternalreference)}% will change
- \xdef\currentstructurelistattribute{\number\lastdestinationattribute}%
+ \ctxcommand{setinternalreference(nil,nil,\nextinternalreference)}% will change
+ \xdef\currentstructurelistattribute{\the\lastdestinationattribute}%
\dontleavehmode\hbox attr \destinationattribute \lastdestinationattribute{}% todo
\else
% and this one doesn't
@@ -889,6 +889,7 @@
\startsetups[\??listrenderings:abc]
\endgraf % are we grouped?
+% \advance % yes or no ... \rightskip is also honored
\leftskip\listparameter\c!margin % after \endgraf !
\listparameter\c!before
\endgraf
@@ -1049,7 +1050,7 @@
\listparameter\c!numbercommand\currentlistsymbol
\listparameter\c!right
\endgroup
- \kern.5em
+ \kern.5\emwidth\relax
\nobreak
\fi
\fi
@@ -1068,7 +1069,7 @@
\ifconditional\c_lists_has_page
\ifconditional\c_lists_show_page
\nobreak
- \hskip.75em\relax
+ \hskip.75\emwidth\relax
\nobreak
\strc_lists_set_reference_attribute\v!pagenumber
\strc_lists_set_style_color\c!pagestyle\c!pagecolor\v!pagenumber
@@ -1262,6 +1263,8 @@
% \resetinteractionparameter\c!contrastcolor
\fi}
+\let\strc_lists_set_style_color\strc_lists_set_style_color_normal
+
%D A helper:
\def\strc_lists_limitated_text#text%
diff --git a/Master/texmf-dist/tex/context/base/strc-mar.lua b/Master/texmf-dist/tex/context/base/strc-mar.lua
index 7b3ac11e137..9c6259de4c4 100644
--- a/Master/texmf-dist/tex/context/base/strc-mar.lua
+++ b/Master/texmf-dist/tex/context/base/strc-mar.lua
@@ -12,19 +12,34 @@ if not modules then modules = { } end modules ['strc-mar'] = {
local insert, concat = table.insert, table.concat
local tostring, next, rawget = tostring, next, rawget
local lpegmatch = lpeg.match
-local match = string.match
+
+local context = context
+local commands = commands
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getbox = nuts.getbox
+
+local traversenodes = nuts.traverse
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local traversenodes = node.traverse
local texsetattribute = tex.setattribute
-local texbox = tex.box
local a_marks = attributes.private("structure","marks")
@@ -63,6 +78,10 @@ local lists = structures.lists
local settings_to_array = utilities.parsers.settings_to_array
+local boxes_too = false -- at some point we can also tag boxes or use a zero char
+
+directives.register("marks.boxestoo", function(v) boxes_too = v end)
+
marks.data = marks.data or allocate()
storage.register("structures/marks/data", marks.data, "structures.marks.data")
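
The boxes_too flag above follows the usual directive pattern: a local boolean flipped by a
registered callback, so sweeping into boxed material stays off unless explicitly enabled.
A standalone sketch of that pattern with a made-up registry (the real mechanism is the
directives.register call shown above):

    local registry = { }                       -- stand-in for the directives mechanism

    local function register(name,callback)     -- mimics directives.register(name,fun)
        registry[name] = callback
    end

    local function set(name,value)             -- what enabling a directive boils down to
        local callback = registry[name]
        if callback then callback(value) end
    end

    local boxes_too = false
    register("marks.boxestoo",function(v) boxes_too = v end)

    set("marks.boxestoo",true)
    print(boxes_too)                           -- true: boxes are now swept for marks too
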
@@ -100,9 +119,9 @@ end
local function sweep(head,first,last)
for n in traversenodes(head) do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local a = n[a_marks]
+ local a = getattr(n,a_marks)
if not a then
-- next
elseif first == 0 then
@@ -111,17 +130,19 @@ local function sweep(head,first,last)
last = a
end
elseif id == hlist_code or id == vlist_code then
- local a = n[a_marks]
- if not a then
- -- next
- elseif first == 0 then
- first, last = a, a
- elseif a > last then
- last = a
+ if boxes_too then
+ local a = getattr(n,a_marks)
+ if not a then
+ -- next
+ elseif first == 0 then
+ first, last = a, a
+ elseif a > last then
+ last = a
+ end
end
- local list = n.list
+ local list = getlist(n)
if list then
- first, last = sweep(list, first, last)
+ first, last = sweep(list,first,last)
end
end
end
@@ -135,9 +156,9 @@ setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s
local lasts = { }
function marks.synchronize(class,n,option)
- local box = texbox[n]
+ local box = getbox(n)
if box then
- local first, last = sweep(box.list,0,0)
+ local first, last = sweep(getlist(box),0,0)
if option == v_keep and first == 0 and last == 0 then
if trace_marks_get or trace_marks_set then
report_marks("action %a, class %a, box %a","retain at synchronize",class,n)
@@ -151,11 +172,16 @@ function marks.synchronize(class,n,option)
for i=1,#classlist do
local class = classlist[i]
local range = ranges[class]
- if not range then
- range = { }
+ if range then
+ range.first = first
+ range.last = last
+ else
+ range = {
+ first = first,
+ last = last,
+ }
ranges[class] = range
end
- range.first, range.last = first, last
if trace_marks_get or trace_marks_set then
report_marks("action %a, class %a, first %a, last %a","synchronize",class,range.first,range.last)
end
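
The synchronize change above reuses an existing per-class range table instead of allocating
a fresh one on every call; the record itself only tracks the lowest and highest mark
attribute seen while sweeping the box. For orientation, its shape with illustrative values:

    local ranges = {
        chapter = { first = 12, last = 17 },   -- mark attribute values, not page numbers
        section = { first = 13, last = 13 },
    }
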
@@ -520,20 +546,22 @@ local function do_first(name,range,check)
report_marks("action %a, name %a, range %a","resolving first",name,range)
end
local f_value, f_index, f_found = doresolve(name,range,false,0,0,check)
- if trace_marks_get then
- report_marks("action %a, name %a, range %a","resolving last",name,range)
- end
- local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check)
- if f_found and l_found and l_index > f_index then
- local name = parentname(name)
- for i=f_index,l_index,1 do
- local si = stack[i]
- local sn = si[name]
- if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= f_value then
- if trace_marks_get then
- report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn)
+ if f_found then
+ if trace_marks_get then
+ report_marks("action %a, name %a, range %a","resolving last",name,range)
+ end
+ local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check)
+ if l_found and l_index > f_index then
+ local name = parentname(name)
+ for i=f_index,l_index,1 do
+ local si = stack[i]
+ local sn = si[name]
+ if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= f_value then
+ if trace_marks_get then
+ report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn)
+ end
+ return sn, i, si
end
- return sn, i, si
end
end
end
@@ -544,24 +572,26 @@ local function do_first(name,range,check)
end
local function do_last(name,range,check)
- if trace_marks_get then
- report_marks("action %a, name %a, range %a","resolving first",name,range)
- end
- local f_value, f_index, f_found = doresolve(name,range,false,0,0,check)
if trace_marks_get then
report_marks("action %a, name %a, range %a","resolving last",name,range)
end
local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check)
- if f_found and l_found and l_index > f_index then
- local name = parentname(name)
- for i=l_index,f_index,-1 do
- local si = stack[i]
- local sn = si[name]
- if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= l_value then
- if trace_marks_get then
- report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn)
+ if l_found then
+ if trace_marks_get then
+ report_marks("action %a, name %a, range %a","resolving first",name,range)
+ end
+ local f_value, f_index, f_found = doresolve(name,range,false,0,0,check)
+ if f_found and l_index > f_index then
+ local name = parentname(name)
+ for i=l_index,f_index,-1 do
+ local si = stack[i]
+ local sn = si[name]
+ if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= l_value then
+ if trace_marks_get then
+ report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn)
+ end
+ return sn, i, si
end
- return sn, i, si
end
end
end
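
Both resolvers above now short-circuit: do_first only resolves the last mark once the first
one was actually found, and do_last mirrors that, so an empty range costs a single doresolve
call instead of two. A standalone sketch of the idea, with a hypothetical resolve function
standing in for doresolve and a plain table standing in for the mark stack:

    local function firstdifferent(resolve,stack,name,range)
        local f_value, f_index, f_found = resolve(name,range,false)
        if not f_found then
            return                             -- nothing there: skip the second resolve
        end
        local l_value, l_index, l_found = resolve(name,range,true)
        if l_found and l_index > f_index then
            for i=f_index,l_index do           -- first entry that differs from f_value
                local sn = stack[i] and stack[i][name]
                if sn and sn ~= f_value then
                    return sn, i
                end
            end
        end
        -- fall through: nothing found or nothing different (handled by the real code)
    end
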
@@ -659,8 +689,10 @@ function marks.fetchallmarks(name,range) fetchallmarks(name,range )
-- here we have a few helpers .. will become commands.*
+local pattern = lpeg.afterprefix("li::")
+
function marks.title(tag,n)
- local listindex = match(n,"^li::(.-)$")
+ local listindex = lpegmatch(pattern,n)
if listindex then
commands.savedlisttitle(tag,listindex,"marking")
else
@@ -669,7 +701,7 @@ function marks.title(tag,n)
end
function marks.number(tag,n) -- no spec
- local listindex = match(n,"^li::(.-)$")
+ local listindex = lpegmatch(pattern,n)
if listindex then
commands.savedlistnumber(tag,listindex)
else
@@ -680,6 +712,9 @@ end
-- interface
+commands.markingtitle = marks.title
+commands.markingnumber = marks.number
+
commands.definemarking = marks.define
commands.relatemarking = marks.relate
commands.setmarking = marks.set
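
The marks.title and marks.number hunks above strip the li:: prefix with a precompiled lpeg
pattern instead of a string.match per call. Given the expression it replaces, the intended
behaviour is: return whatever follows li::, or nil when the prefix is absent. A standalone
illustration (assuming a plain Lua session with the lpeg module available):

    local lpeg = require("lpeg")
    local P, C = lpeg.P, lpeg.C

    -- behaves like the replaced string.match(n,"^li::(.-)$")
    local pattern = P("li::") * C(P(1)^0)

    print(lpeg.match(pattern,"li::123"))   -- 123
    print(lpeg.match(pattern,"chapter"))   -- nil
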
diff --git a/Master/texmf-dist/tex/context/base/strc-mat.mkiv b/Master/texmf-dist/tex/context/base/strc-mat.mkiv
index 20fa078a2d3..18cb005cbea 100644
--- a/Master/texmf-dist/tex/context/base/strc-mat.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-mat.mkiv
@@ -171,10 +171,20 @@
\global\setfalse\c_strc_formulas_inside_place_sub
\to \everyresetformulas
+% \def\strc_formulas_place_numbering % place formula
+% {\settrue\c_strc_formulas_handle_number
+% \strc_formulas_check_reference\c_strc_formulas_place_number_mode\currentplaceformulareference
+% \glet\strc_formulas_place_number\strc_formulas_place_number_indeed
+% \glet\strc_formulas_place_number_nested\strc_formulas_place_number_nested_indeed}
+
\def\strc_formulas_place_numbering % place formula
{\settrue\c_strc_formulas_handle_number
\strc_formulas_check_reference\c_strc_formulas_place_number_mode\currentplaceformulareference
- \glet\strc_formulas_place_number\strc_formulas_place_number_indeed
+ \ifnum\c_strc_formulas_place_number_mode=\plustwo
+ \glet\strc_formulas_place_number\relax
+ \else
+ \glet\strc_formulas_place_number\strc_formulas_place_number_indeed
+ \fi
\glet\strc_formulas_place_number_nested\strc_formulas_place_number_nested_indeed}
\def\strc_formulas_handle_number % formulas
@@ -610,6 +620,33 @@
%D \startformula x \stopformula % now has \noindent (in mkii we messed with baselineskip)
%D \stoptyping
+% \unexpanded\def\startdisplaymath
+% {\bgroup
+% \par
+% \informulatrue
+% \beforedisplayspace
+% \par
+% \ifvmode
+% \prevdepth-\maxdimen % texbook pagina 79-80
+% \fi
+% \noindent % else funny hlist with funny baselineskip
+% $$% \Ucheckedstartdisplaymath
+% \setdisplaydimensions
+% \startinnermath}
+%
+% \unexpanded\def\stopdisplaymath
+% {\stopinnermath
+% $$% \Ucheckedstopdisplaymath
+% \par
+% \afterdisplayspace
+% \par
+% \egroup}
+
+\newconstant\c_strc_formulas_space_model
+
+\c_strc_formulas_space_model\plusone
+%c_strc_formulas_space_model\plustwo % needs checking with spac-ver
+
\unexpanded\def\startdisplaymath
{\bgroup
\par
@@ -617,18 +654,32 @@
\beforedisplayspace
\par
\ifvmode
- \prevdepth-\maxdimen % texbook pagina 79-80
+ \ifcase\c_strc_formulas_space_model
+ % nothing
+ \or
+ % nothing yet
+ \or
+ \prevdepth-\maxdimen % texbook pagina 79-80
+ \fi
\fi
\noindent % else funny hlist with funny baselineskip
- $$% \Ustartdisplaymath
+ \Ucheckedstartdisplaymath
\setdisplaydimensions
- %\setpredisplaysize
\startinnermath}
\unexpanded\def\stopdisplaymath
{\stopinnermath
- $$% \Ustopdisplaymath
+ \Ucheckedstopdisplaymath
\par
+ \ifvmode
+ \ifcase\c_strc_formulas_space_model
+ % nothing
+ \or
+ \prevdepth .5\strutdp
+ \or
+ \prevdepth\lineheight
+ \fi
+ \fi
\afterdisplayspace
\par
\egroup}
diff --git a/Master/texmf-dist/tex/context/base/strc-not.lua b/Master/texmf-dist/tex/context/base/strc-not.lua
index 882e00a4435..40b78d59f8b 100644
--- a/Master/texmf-dist/tex/context/base/strc-not.lua
+++ b/Master/texmf-dist/tex/context/base/strc-not.lua
@@ -8,7 +8,6 @@ if not modules then modules = { } end modules ['strc-not'] = {
local format = string.format
local next = next
-local texcount = tex.count
local trace_notes = false trackers.register("structures.notes", function(v) trace_notes = v end)
local trace_references = false trackers.register("structures.notes.references", function(v) trace_references = v end)
@@ -24,6 +23,9 @@ local notes = structures.notes
local references = structures.references
local counterspecials = counters.specials
+local texgetcount = tex.getcount
+local texgetbox = tex.getbox
+
notes.states = notes.states or { }
lists.enhancers = lists.enhancers or { }
@@ -91,10 +93,10 @@ end
local function getn(tag)
local nd = notedata[tag]
- return (nd and #nd) or 0
+ return nd and #nd or 0
end
-notes.get = get
+notes.get = get
notes.getn = getn
-- we could make a special enhancer
@@ -189,7 +191,7 @@ local function hascontent(tag)
local ok = notestates[tag]
if ok then
if ok.kind == "insert" then
- ok = tex.box[ok.number]
+ ok = texgetbox(ok.number)
if ok then
ok = tbs.list
ok = lst and lst.next
@@ -257,7 +259,7 @@ function notes.checkpagechange(tag) -- called before increment !
end
elseif current then
-- we need to locate the next one, best guess
- if texcount.realpageno > current.pagenumber.number then
+ if texgetcount("realpageno") > current.pagenumber.number then
counters.reset(tag)
end
end
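
checkpagechange above compares the page recorded for the previous note with the current real
page and resets the per-page counter when the page advanced. A standalone sketch of that
decision, with simplified data in place of the real note records and counters.reset:

    -- current stands for the previously placed note; reset for counters.reset(tag)
    local function checkpagechange(current,realpage,reset)
        if current and realpage > current.pagenumber then
            reset()                            -- a new page: restart per-page numbering
        end
    end

    checkpagechange({ pagenumber = 3 },4,function() print("reset") end)   -- reset
    checkpagechange({ pagenumber = 4 },4,function() print("reset") end)   -- nothing
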
@@ -280,7 +282,7 @@ commands.postponenotes = notes.postpone
function notes.setsymbolpage(tag,n,l)
local l = l or listindex(tag,n)
if l then
- local p = texcount.realpageno
+ local p = texgetcount("realpageno")
if trace_notes or trace_references then
report_notes("note %a of %a with list index %a gets symbol page %a",n,tag,l,p)
end
@@ -382,7 +384,7 @@ function commands.flushnotes(tag,whatkind,how) -- store and postpone
local rp = get(tag,i)
rp = rp and rp.references
rp = rp and rp.symbolpage or 0
- if rp > texcount.realpageno then
+ if rp > texgetcount("realpageno") then
state.start = i
return
end
diff --git a/Master/texmf-dist/tex/context/base/strc-not.mkvi b/Master/texmf-dist/tex/context/base/strc-not.mkvi
index 76816d0355c..60ab66c980f 100644
--- a/Master/texmf-dist/tex/context/base/strc-not.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-not.mkvi
@@ -231,7 +231,7 @@
\ifnotesenabled
\strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
\fi
- \doifnextoptionalelse\strc_notations_command_yes\strc_notations_command_nop}
+ \doifnextoptionalcselse\strc_notations_command_yes\strc_notations_command_nop}
\unexpanded\def\strc_notations_command_nop#title%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#title},\c!bookmark=,\c!list=][]%
@@ -265,7 +265,7 @@
% \normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
% {\normaldef\noexpand\strc_pickup_yes[##1]##2\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[##1]{##2}}%
% \normaldef\noexpand\strc_pickup_nop ##1\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {##1}}}%
-% \doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
+% \doifnextoptionalcselse\strc_pickup_yes\strc_pickup_nop}
\unexpanded\def\strc_notations_start#tag#stoptag%
{\begingroup
@@ -278,7 +278,7 @@
\normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
{\def\noexpand\strc_pickup_yes[#one]#two\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[#one]{#two}}%
\def\noexpand\strc_pickup_nop #one\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {#one}}}%
- \doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
+ \doifnextoptionalcselse\strc_pickup_yes\strc_pickup_nop}
\unexpanded\def\strc_notations_start_yes[#reference]#title%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#reference},\c!title={#title},\c!bookmark=,\c!list=][]%
@@ -460,7 +460,11 @@
\else\ifconditional\inlocalnotes % todo: per note class
\global\settrue\postponednote
\else
+\ifconditional\c_strc_notes_delayed
+ % probably end notes
+\else
\handlenoteinsert\currentnote\currentnotenumber % either an insert or just delayed
+\fi
\fi\fi
\endgroup
\fi
@@ -622,8 +626,15 @@
\unexpanded\def\strc_notes_inject_symbol_nop
{\strc_notes_inject_symbol_indeed\conditionalfalse}
-\unexpanded\def\strc_notes_inject_symbol_snc
- {\currentconstructionsynchronize} % this flushes the data to the list
+% % this flushes the data to the list
+%
+% \unexpanded\def\strc_notes_inject_symbol_snc
+% {\currentconstructionsynchronize}
+%
+% but instead we need to do this with the content
+
+\unexpanded\def\strc_notes_inject_symbol_snc % so this will go away probably
+ {}
\unexpanded\def\strc_notes_inject_symbol_indeed#synchronize%
{\removeunwantedspaces
@@ -749,7 +760,9 @@
%appendtoks \notesenabledfalse \to \everymarking
\appendtoks \notesenabledfalse \to \everybeforepagebody
-\appendtoks \notesenabledfalse \to \everystructurelist % quick hack
+\appendtoks \notesenabledfalse \to \everystructurelist % quick hack
+\appendtoks \notesenabledfalse \to \everysimplifycommands % quick hack
+\appendtoks \notesenabledfalse \to \everypreroll % quick hack
%D Often we need to process the whole set of notes and to make that
%D fast, we use a token register:
@@ -1170,18 +1183,20 @@
\usesetupsparameter\noteparameter % experimental
\doifelse{\noteparameter\c!paragraph}\v!yes
{\nointerlineskip
+ \startvboxtohboxseparator
+ \noteparameter\c!inbetween
+ \stopvboxtohboxseparator
\startvboxtohbox
- \handlenoteitself{#tag}{#id}%
- % add some slack
+ \handlenoteitself{#tag}{#id}%
\stopvboxtohbox}
{\handlenoteitself{#tag}{#id}}%
\egroup
\the\everyafternoteinsert
\endgroup}
-\unexpanded\def\betweennoteitself#tag%
+\unexpanded\def\betweennoteitself#tag% used ?
{\edef\currentnote{#tag}%
- \doif{\noteparameter\c!paragraph}\v!yes\strc_notes_between_paragraphs}
+ \doif{\noteparameter\c!paragraph}\v!yes{\noteparameter\c!inbetween}}
\unexpanded\def\handlenoteitself#tag#id%
{\edef\currentnotenumber{#id}%
@@ -1192,7 +1207,7 @@
% as we can have collected notes (e.g. in tables) we need to recover
% \currentdescriptionattribute and \currentdescriptionsynchronize
%
- %\reinstateconstructionnumberentry\currentconstructionlistentry % we could store the number in the entry
+ \reinstateconstructionnumberentry\currentconstructionlistentry % we could store the number in the entry (e.g. needed when local notes in table)
%
\dontcomplain
% \begingroup
@@ -1233,6 +1248,7 @@
\appendtoks
\doif{\noteparameter\c!scope}\v!page{\floatingpenalty\maxdimen}% experiment
\penalty\currentnotepenalty
+ %\interlinepenalty\maxdimen % todo
\forgetall
\strc_notes_set_bodyfont
\redoconvertfont % to undo \undo calls in headings etc
@@ -1289,6 +1305,7 @@
\strc_notes_set_bodyfont
\setbox\scratchbox\hbox
{\strc_notes_flush_inserts}%
+ \page_postprocessors_linenumbers_deepbox\scratchbox
\setbox\scratchbox\hbox
{\setupcurrentnote
[\c!location=,
@@ -1304,6 +1321,9 @@
\fi}}%
\setbox\scratchbox\hbox{\lower\strutdepth\box\scratchbox}%
\dp\scratchbox\strutdepth % so we know that it has the note bodyfont depth
+ \ifvmode
+ \nointerlineskip % else sometimes empty line
+ \fi
\box\scratchbox
\egroup
\endgraf
@@ -1332,24 +1352,11 @@
% idea: tag with attr and then just flush them again
-\def\strc_notes_between_paragraphs % should not be too much
- {\noteparameter\c!inbetween}
-
-\def\strc_notes_between_paragraphs_first
- {\glet\strc_notes_between_paragraphs_indeed\strc_notes_between_paragraphs}% shape works reverse
-
-\def\strc_notes_flush_global % will be done in lua instead
+\def\strc_notes_flush_global
{\doifelse{\noteparameter\c!paragraph}\v!yes
- {\vbox
- {\beginofshapebox
- \iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber
- \endofshapebox
- \let\strc_notes_between_paragraphs_indeed\strc_notes_between_paragraphs_first % shape works reverse
- \doreshapebox
- {\hbox{\unhbox\shapebox\strc_notes_between_paragraphs_indeed}}
- \donothing \donothing \donothing % get rid of penalties etc
- \innerflushshapebox
- \convertvboxtohbox}}
+ {\vbox\starthboxestohbox
+ \iftrialtypesetting\unvcopy\else\unvbox\fi\currentnoteinsertionnumber
+ \stophboxestohbox}
{\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber}}
%D Supporting end notes is surprisingly easy. Even better, we
@@ -1779,19 +1786,19 @@
{\dodoubleempty\strc_notes_symbol}
\def\strc_notes_symbol[#tag][#reference]%
- {\dontleavehmode
- \begingroup
- \edef\currentnote{#tag}%
- \usenotestyleandcolor\c!textstyle\c!textcolor
- \ifnotesenabled
+ {\ifnotesenabled
+ \dontleavehmode
+ \begingroup
+ \edef\currentnote{#tag}%
+ \usenotestyleandcolor\c!textstyle\c!textcolor
\ifsecondargument
\unskip
\noteparameter\c!textcommand{\in[#reference]}% command here?
\else
\noteparameter\c!textcommand\lastnotesymbol % check if command double
\fi
- \fi
- \endgroup}
+ \endgroup
+ \fi}
\unexpanded\def\note
{\dodoubleempty\strc_notes_note}
diff --git a/Master/texmf-dist/tex/context/base/strc-num.lua b/Master/texmf-dist/tex/context/base/strc-num.lua
index b0eae6b78bb..e1fc6003071 100644
--- a/Master/texmf-dist/tex/context/base/strc-num.lua
+++ b/Master/texmf-dist/tex/context/base/strc-num.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['strc-num'] = {
local format = string.format
local next, type = next, type
local min, max = math.min, math.max
-local texcount, texsetcount = tex.count, tex.setcount
+local texsetcount = tex.setcount
-- Counters are managed here. They can have multiple levels which makes it easier to synchronize
-- them. Synchronization is sort of special anyway, as it relates to document structuring.
@@ -147,9 +147,9 @@ local function dummyconstructor(t,name,i)
end
setmetatableindex(constructor,function(t,k)
- if trace_counters then
- report_counters("unknown constructor %a",k)
- end
+ -- if trace_counters then
+ -- report_counters("unknown constructor %a",k)
+ -- end
return dummyconstructor
end)
@@ -404,7 +404,7 @@ function counters.restart(name,n,newstart,noreset)
if newstart then
local d = allocate(name,n)
d.start = newstart
- if not noreset then
+ if not noreset then -- why / when needed ?
reset(name,n) -- hm
end
end
@@ -589,8 +589,13 @@ function commands.doifnotcounter (name) commands.doifnot (counterdata[name]) end
function commands.incrementedcounter(...) context(counters.add(...)) end
+-- the noreset is somewhat messy ... always false messes up e.g. itemize but true the pagenumbers
+--
+-- if this fails i'll clean up this still somewhat experimental mechanism (but i need use cases)
+
function commands.checkcountersetup(name,level,start,state)
- counters.restart(name,1,start,true) -- no reset
+ local noreset = true -- level > 0 -- was true
+ counters.restart(name,1,start,noreset) -- was true
counters.setstate(name,state)
counters.setlevel(name,level)
sections.setchecker(name,level,counters.reset)
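
The noreset comment above is the crux of this hunk: when checkcountersetup installs a start
value, the flag decides whether the running counter state is wiped as well, and (as the
counters.restart hunk earlier in this file shows) reset is only called when noreset is not
set. A simplified standalone sketch of what the flag controls (the counter table here is
illustrative, not the real counterdata layout):

    local counter = { start = 1, value = 7 }

    local function restart(c,newstart,noreset)
        if newstart then
            c.start = newstart
            if not noreset then
                c.value = newstart             -- stand-in for counters.reset
            end
        end
    end

    restart(counter,9,true)                    -- keep the running value
    print(counter.start,counter.value)         -- 9   7

    restart(counter,9,false)                   -- a reset rewinds the running value too
    print(counter.start,counter.value)         -- 9   9
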
diff --git a/Master/texmf-dist/tex/context/base/strc-num.mkiv b/Master/texmf-dist/tex/context/base/strc-num.mkiv
index 679dd0532c9..6802027e67b 100644
--- a/Master/texmf-dist/tex/context/base/strc-num.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-num.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% work in progress
% to be checked: can we use the command handler code here?
% all settings will move to lua
@@ -63,6 +65,11 @@
\appendtoks
\ifx\currentcounter\empty \else
+ \edef\p_number{\counterparameter\c!number}%
+ \ifx\p_number\empty \else
+ \ctxcommand{setcounter("\counterparameter\s!name",1,\number\p_number)}%
+ \letcounterparameter\c!number\empty
+ \fi
\edef\p_start{\counterparameter\c!start}%
\setexpandedcounterparameter\c!start{\ifx\p_start\empty0\else\number\p_start\fi}%
\strc_counters_check_setup
@@ -351,7 +358,7 @@
{\begingroup
\edef\currentcounter{#1}%
\ifsecondargument\setupcurrentcounter[#2]\fi
- \ctxlua{structures.sections.prefixedconverted(
+ \ctxcommand{prefixedconverted(
"\counterparameter\s!name",
{
prefix = "\counterparameter\c!prefix",
@@ -379,7 +386,7 @@
\endgroup}
\def\directconvertedcounter#1#2% name, type
- {\ctxlua{structures.sections.prefixedconverted(
+ {\ctxcommand{prefixedconverted(
"\namedcounterparameter{#1}\s!name",
{
prefix = "\namedcounterparameter{#1}\c!prefix",
@@ -480,6 +487,7 @@
% currentstructurecomponent => \strc_current_ or just \m_strc_
+
\unexpanded\def\strc_counters_register_component#1#2#3#4#5#6#7[#8][#9]% maybe also nolist
{\begingroup
%
@@ -504,119 +512,153 @@
\fi
%
\ifx\p_hascaption\v!yes
- \xdef\currentstructurecomponentname {#3\s!name}%
- \xdef\currentstructurecomponentlevel {#3\c!level}%
- \edef\currentstructurecomponentexpansion {#3\c!expansion}%
- \xdef\currentstructurecomponentxmlsetup {#3\c!xmlsetup}%
- \xdef\currentstructurecomponentcatcodes {#3\s!catcodes}%
- \xdef\currentstructurecomponentlabel {#3\c!label}%
- \xdef\currentstructurecomponentreference {#3\c!reference}%
- \xdef\currentstructurecomponentreferenceprefix{#3\c!referenceprefix}%
- \ifx\currentstructurecomponentexpansion\s!xml
- \xmlstartraw
- \xdef\currentstructurecomponenttitle {#3\c!title}%
- \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#3\c!marking}%
- \xdef\currentstructurecomponentlist {#3\c!list}%
- \xmlstopraw
- \ifx\currentstructurecomponentlist\empty
- \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
- \fi
- \globallet\currentstructurecomponentcoding\s!xml
+ \strc_counters_register_component_list{#1}{#3}{#4}{#9}%
+ \else\ifx\currentstructurecomponentreference\empty
+ \strc_counters_register_component_none
+ \else
+ \strc_counters_register_component_page{#3}%
+ \fi\fi
+ \endgroup}
+
+\def\strc_counters_register_component_none
+ {\glet\m_strc_counters_last_registered_index \relax
+ \glet\m_strc_counters_last_registered_attribute \attributeunsetvalue
+ \glet\m_strc_counters_last_registered_synchronize\relax}
+
+\def\strc_counters_register_component_page#1%
+ {\xdef\currentstructurecomponentreference {#1\c!reference}%
+ \xdef\currentstructurecomponentreferenceprefix{#1\c!referenceprefix}%
+ % maybe have a helper in strc-ref.mkvi
+ \setnextinternalreference
+ \ctxcommand{setreferenceattribute(% can be helper with less passed
+ "\s!page",
+ "\currentstructurecomponentreferenceprefix",
+ "\currentstructurecomponentreference",
+ {
+ references = {
+ internal = \nextinternalreference,
+ block = "\currentsectionblock",
+ section = structures.sections.currentid(),
+ },
+ metadata = {
+ kind = "page",
+ },
+ },
+ "\interactionparameter\c!focus")
+ }%
+ \xdef\m_strc_counters_last_registered_attribute {\the\lastdestinationattribute}%
+ \glet\m_strc_counters_last_registered_index \relax
+ \glet\m_strc_counters_last_registered_synchronize\relax}
+
+\def\strc_counters_register_component_list#1#2#3#4%
+ {\xdef\currentstructurecomponentname {#2\s!name}%
+ \xdef\currentstructurecomponentlevel {#2\c!level}%
+ \edef\currentstructurecomponentexpansion {#2\c!expansion}%
+ \xdef\currentstructurecomponentxmlsetup {#2\c!xmlsetup}%
+ \xdef\currentstructurecomponentcatcodes {#2\s!catcodes}%
+ \xdef\currentstructurecomponentlabel {#2\c!label}%
+ \xdef\currentstructurecomponentreference {#2\c!reference}%
+ \xdef\currentstructurecomponentreferenceprefix{#2\c!referenceprefix}%
+ \ifx\currentstructurecomponentexpansion\s!xml
+ \xmlstartraw
+ \xdef\currentstructurecomponenttitle {#2\c!title}%
+ \xdef\currentstructurecomponentbookmark{#2\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#2\c!marking}%
+ \xdef\currentstructurecomponentlist {#2\c!list}%
+ \xmlstopraw
+ \ifx\currentstructurecomponentlist\empty
+ \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
+ \fi
+ \globallet\currentstructurecomponentcoding\s!xml
+ \else
+ \ifx\currentstructurecomponentexpansion\v!yes
+ \xdef\currentstructurecomponenttitle {#2\c!title}%
+ \xdef\currentstructurecomponentbookmark{#2\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#2\c!marking}%
+ \xdef\currentstructurecomponentlist {#2\c!list}%
\else
- \ifx\currentstructurecomponentexpansion\v!yes
- \xdef\currentstructurecomponenttitle {#3\c!title}%
- \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#3\c!marking}%
- \xdef\currentstructurecomponentlist {#3\c!list}%
- \else
- \xdef\currentstructurecomponenttitle {#4\c!title}%
- \xdef\currentstructurecomponentbookmark{#4\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#4\c!marking}%
- \xdef\currentstructurecomponentlist {#4\c!list}%
- \iflocation \ifx\currentstructurecomponentbookmark\empty
- \begingroup
- \simplifycommands
- \xdef\currentstructurecomponentbookmark{\detokenize\expandafter{\normalexpanded{#3\c!title}}}%
- \endgroup
- \fi \fi
- \fi
- \ifx\currentstructurecomponentlist\empty
- \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
- \fi
- \globallet\currentstructurecomponentcoding\s!tex
+ \xdef\currentstructurecomponenttitle {#3\c!title}%
+ \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#3\c!marking}%
+ \xdef\currentstructurecomponentlist {#3\c!list}%
+ \iflocation \ifx\currentstructurecomponentbookmark\empty
+ \begingroup
+ \simplifycommands
+ \xdef\currentstructurecomponentbookmark{\detokenize\expandafter{\normalexpanded{#2\c!title}}}%
+ \endgroup
+ \fi \fi
\fi
- %
- \setnextinternalreference
- \xdef\m_strc_counters_last_registered_index{\ctxcommand{addtolist{
- metadata = {
- kind = "#1",
- name = "\currentname",
- level = structures.sections.currentlevel(),
- catcodes = \the\ifx\currentstructurecomponentcatcodes\empty\catcodetable\else\csname\currentstructurecomponentcatcodes\endcsname\fi,
- coding = "\currentstructurecomponentcoding",
- \ifx\currentstructurecomponentcoding\s!xml
- xmlroot = "\xmldocument",
- \fi
- \ifx\currentstructurecomponentxmlsetup\empty \else
- xmlsetup = "\currentstructurexmlsetup",
- \fi
- },
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock",
- reference = "\currentstructurecomponentreference",
- referenceprefix = "\currentstructurecomponentreferenceprefix",
- section = structures.sections.currentid(),
- },
- titledata = {
- label = \!!bs\detokenize\expandafter{\currentstructurecomponentlabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentstructurecomponenttitle }\!!es,
- \ifx\currentstructurecomponentbookmark\currentstructurecomponenttitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentstructurecomponentbookmark }\!!es,
- \fi
- \ifx\currentstructurecomponentmarking\currentstructurecomponenttitle \else
- marking = \!!bs\detokenize\expandafter{\currentstructurecomponentmarking }\!!es,
- \fi
- \ifx\currentstructurecomponentlist\currentstructurecomponenttitle \else
- list = \!!bs\detokenize\expandafter{\currentstructurecomponentlist}\!!es,
- \fi
- },
+ \ifx\currentstructurecomponentlist\empty
+ \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
+ \fi
+ \globallet\currentstructurecomponentcoding\s!tex
+ \fi
+ %
+ \setnextinternalreference
+ \xdef\m_strc_counters_last_registered_index{\ctxcommand{addtolist{
+ metadata = {
+ kind = "#1",
+ name = "\currentname",
+ level = structures.sections.currentlevel(),
+ catcodes = \the\ifx\currentstructurecomponentcatcodes\empty\catcodetable\else\csname\currentstructurecomponentcatcodes\endcsname\fi,
+ coding = "\currentstructurecomponentcoding",
+ \ifx\currentstructurecomponentcoding\s!xml
+ xmlroot = "\xmldocument",
+ \fi
+ \ifx\currentstructurecomponentxmlsetup\empty \else
+ xmlsetup = "\currentstructurexmlsetup",
+ \fi
+ },
+ references = {
+ internal = \nextinternalreference,
+ block = "\currentsectionblock",
+ reference = "\currentstructurecomponentreference",
+ referenceprefix = "\currentstructurecomponentreferenceprefix",
+ section = structures.sections.currentid(),
+ },
+ titledata = {
+ label = \!!bs\detokenize\expandafter{\currentstructurecomponentlabel }\!!es,
+ title = \!!bs\detokenize\expandafter{\currentstructurecomponenttitle }\!!es,
+ \ifx\currentstructurecomponentbookmark\currentstructurecomponenttitle \else
+ bookmark = \!!bs\detokenize\expandafter{\currentstructurecomponentbookmark}\!!es,
+ \fi
+ \ifx\currentstructurecomponentmarking\currentstructurecomponenttitle \else
+ marking = \!!bs\detokenize\expandafter{\currentstructurecomponentmarking }\!!es,
+ \fi
+ \ifx\currentstructurecomponentlist\currentstructurecomponenttitle \else
+ list = \!!bs\detokenize\expandafter{\currentstructurecomponentlist}\!!es,
+ \fi
+ },
\ifx\p_hasnumber\v!yes
- prefixdata = {
- prefix = "#3\c!prefix",
- separatorset = "#3\c!prefixseparatorset",
- conversion = \!!bs#3\c!prefixconversion\!!es,
- conversionset = "#3\c!prefixconversionset",
- set = "#3\c!prefixset",
- % segments = "#3\c!prefixsegments",
- segments = "\p_prefixsegments",
- connector = \!!bs#3\c!prefixconnector\!!es,
- },
- numberdata = { % more helpers here, like compact elsewhere
- numbers = structures.counters.compact("\currentcounter",nil,true),
- group = "#3\c!group",
- groupsuffix = \!!bs#3\c!groupsuffix\!!es,
- counter = "\currentcounter",
- separatorset = "#3\c!numberseparatorset",
- conversion = \!!bs#3\c!numberconversion\!!es,
- conversionset = "#3\c!numberconversionset",
- starter = \!!bs#3\c!numberstarter\!!es,
- stopper = \!!bs#3\c!numberstopper\!!es,
- segments = "#3\c!numbersegments",
- },
+ prefixdata = {
+ prefix = "#2\c!prefix",
+ separatorset = "#2\c!prefixseparatorset",
+ conversion = \!!bs#2\c!prefixconversion\!!es,
+ conversionset = "#2\c!prefixconversionset",
+ set = "#2\c!prefixset",
+ % segments = "#2\c!prefixsegments",
+ segments = "\p_prefixsegments",
+ connector = \!!bs#2\c!prefixconnector\!!es,
+ },
+ numberdata = { % more helpers here, like compact elsewhere
+ numbers = structures.counters.compact("\currentcounter",nil,true),
+ group = "#2\c!group",
+ groupsuffix = \!!bs#2\c!groupsuffix\!!es,
+ counter = "\currentcounter",
+ separatorset = "#2\c!numberseparatorset",
+ conversion = \!!bs#2\c!numberconversion\!!es,
+ conversionset = "#2\c!numberconversionset",
+ starter = \!!bs#2\c!numberstarter\!!es,
+ stopper = \!!bs#2\c!numberstopper\!!es,
+ segments = "#2\c!numbersegments",
+ },
\fi
- userdata = \!!bs\detokenize{#9}\!!es % will be converted to table at the lua end
- }
- }}%
- \xdef\m_strc_counters_last_registered_attribute {\ctxcommand {setinternalreference(nil,nil,\nextinternalreference)}}%
- \xdef\m_strc_counters_last_registered_synchronize{\ctxlatecommand{enhancelist(\m_strc_counters_last_registered_index)}}%
- \else
- \glet\m_strc_counters_last_registered_index \relax
- \glet\m_strc_counters_last_registered_attribute \attributeunsetvalue
- \glet\m_strc_counters_last_registered_synchronize\relax
- \fi
- \endgroup}
+ userdata = \!!bs\detokenize{#4}\!!es % will be converted to table at the lua end
+ }
+ }}%
+ \ctxcommand{setinternalreference(nil,nil,\nextinternalreference)}%
+ \xdef\m_strc_counters_last_registered_attribute {\the\lastdestinationattribute}%
+ \xdef\m_strc_counters_last_registered_synchronize{\ctxlatecommand{enhancelist(\m_strc_counters_last_registered_index)}}}
\let\m_strc_counters_last_registered_index \relax
\let\m_strc_counters_last_registered_attribute \relax
@@ -744,4 +786,26 @@
% \unexpanded\def#7##1% register
% {\normalexpanded{#5{\the#5\edef\noexpand\currentcounter{##1}\noexpand\the\everysetupcounter}}}}
+%D For good old times (easier to explain in manuals):
+%D
+%D \starttyping
+%D \setcounter[userpage][9]
+%D \setupcounter[userpage][number=9]
+%D \setupuserpagenumber[number=9]
+%D \stoptyping
+
+% needs testing: we might have conflicts with mixins
+%
+% \appendtoks
+% \ifx\currentcounter\empty \else
+% \edef\p_number{\counterparameter\c!number}%
+% \ifx\p_number\empty \else
+% \strc_counters_set\currentcounter\p_number
+% \letcounterparameter\c!number\empty
+% \fi
+% \fi
+% \to \everysetupcounter
+
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-pag.lua b/Master/texmf-dist/tex/context/base/strc-pag.lua
index f70d37d6397..c294a464539 100644
--- a/Master/texmf-dist/tex/context/base/strc-pag.lua
+++ b/Master/texmf-dist/tex/context/base/strc-pag.lua
@@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['strc-pag'] = {
license = "see context related readme files"
}
-local texcount = tex.count
-
local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local trace_pages = false trackers.register("structures.pages", function(v) trace_pages = v end)
@@ -26,39 +24,50 @@ local counterdata = counters.data
local variables = interfaces.variables
local context = context
+local commands = commands
local processors = typesetters.processors
local applyprocessor = processors.apply
local startapplyprocessor = processors.startapply
local stopapplyprocessor = processors.stopapply
+local texsetcount = tex.setcount
+local texgetcount = tex.getcount
+
+local ctx_convertnumber = context.convertnumber
+
-- storage
local collected, tobesaved = allocate(), allocate()
pages.collected = collected
pages.tobesaved = tobesaved
+pages.nofpages = 0
local function initializer()
collected = pages.collected
tobesaved = pages.tobesaved
+ pages.nofpages = #collected
end
job.register('structures.pages.collected', tobesaved, initializer)
local specification = { } -- to be checked
-function pages.save(prefixdata,numberdata)
- local realpage, userpage = texcount.realpageno, texcount.userpageno
+function pages.save(prefixdata,numberdata,extradata)
+ local realpage = texgetcount("realpageno")
+ local userpage = texgetcount("userpageno")
if realpage > 0 then
if trace_pages then
report_pages("saving page %s.%s",realpage,userpage)
end
local data = {
- number = userpage,
- block = sections.currentblock(),
- prefixdata = prefixdata and helpers.simplify(prefixdata),
- numberdata = numberdata and helpers.simplify(numberdata),
+ number = userpage,
+ viewerprefix = extradata.viewerprefix,
+ state = extradata.state,
+ block = sections.currentblock(),
+ prefixdata = prefixdata and helpers.simplify(prefixdata),
+ numberdata = numberdata and helpers.simplify(numberdata),
}
tobesaved[realpage] = data
if not collected[realpage] then
@@ -73,32 +82,32 @@ end
-- builder we have to make sure it starts at least at 1.
function counters.specials.userpage()
- local r = texcount.realpageno
+ local r = texgetcount("realpageno")
if r > 0 then
local t = tobesaved[r]
if t then
- t.number = texcount.userpageno
+ t.number = texgetcount("userpageno")
if trace_pages then
report_pages("forcing pagenumber of realpage %s to %s",r,t.number)
end
return
end
end
- local u = texcount.userpageno
+ local u = texgetcount("userpageno")
if u == 0 then
if trace_pages then
report_pages("forcing pagenumber of realpage %s to %s (probably a bug)",r,1)
end
counters.setvalue("userpage",1)
- texcount.userpageno = 1
+ texsetcount("userpageno",1) -- not global ?
end
end
-local f_convert = string.formatters["\\convertnumber{%s}{%s}"]
-
-local function convertnumber(str,n)
- return f_convert(str or "numbers",n)
-end
+-- local f_convert = string.formatters["\\convertnumber{%s}{%s}"]
+--
+-- local function convertnumber(str,n)
+-- return f_convert(str or "numbers",n)
+-- end
function pages.number(realdata,pagespec)
local userpage, block = realdata.number, realdata.block or "" -- sections.currentblock()
@@ -111,12 +120,12 @@ function pages.number(realdata,pagespec)
applyprocessor(starter)
end
if conversion ~= "" then
- context.convertnumber(conversion,userpage)
+ ctx_convertnumber(conversion,userpage)
else
if conversionset == "" then conversionset = "default" end
local theconversion = sets.get("structure:conversions",block,conversionset,1,"numbers") -- to be checked: 1
local data = startapplyprocessor(theconversion)
- context.convertnumber(data or "number",userpage)
+ ctx_convertnumber(data or "number",userpage)
stopapplyprocessor()
end
if stopper ~= "" then
@@ -252,8 +261,8 @@ function helpers.prefix(data,prefixspec)
end
function pages.is_odd(n)
- n = n or texcount.realpageno
- if texcount.pagenoshift % 2 == 0 then
+ n = n or texgetcount("realpageno")
+ if texgetcount("pagenoshift") % 2 == 0 then
return n % 2 == 0
else
return n % 2 ~= 0
@@ -311,3 +320,8 @@ function sections.prefixedconverted(name,prefixspec,numberspec)
counters.converted(name,numberspec)
end
end
+
+--
+
+commands.savepagedata = pages.save
+commands.prefixedconverted = sections.prefixedconverted -- weird place
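
The pages.save change earlier in this file adds a third argument: besides the prefix and
number specifications, the record stored per real page now also carries the viewer prefix
and the page state. A sketch of that record with purely illustrative values (prefixdata and
numberdata stand for the simplified specification tables):

    local record = {
        number       = 10,          -- user page number
        viewerprefix = "",          -- new, taken from extradata
        state        = "start",     -- new, taken from extradata
        block        = "bodypart",
        prefixdata   = { },         -- helpers.simplify(prefixdata)
        numberdata   = { },         -- helpers.simplify(numberdata)
    }
    -- tobesaved[realpage] = record
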
diff --git a/Master/texmf-dist/tex/context/base/strc-pag.mkiv b/Master/texmf-dist/tex/context/base/strc-pag.mkiv
index 85cfeb40f55..6eddc0fba71 100644
--- a/Master/texmf-dist/tex/context/base/strc-pag.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-pag.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% Allocation:
\countdef\realpageno \zerocount \realpageno \plusone
@@ -106,8 +108,10 @@
\let\setuppagenumber\setupuserpagenumber
\let\resetpagenumber\resetuserpagenumber
+% invisible =
+
\def\strc_pagenumbers_page_state_save % \normalexpanded?
- {\ctxlua{structures.pages.save({
+ {\ctxcommand{savepagedata({
prefix = "\namedcounterparameter\s!userpage\c!prefix",
separatorset = "\namedcounterparameter\s!userpage\c!prefixseparatorset",
conversion = "\namedcounterparameter\s!userpage\c!prefixconversion",
@@ -120,6 +124,9 @@
conversionset = "\namedcounterparameter\s!userpage\c!numberconversionset",
starter = \!!bs\namedcounterparameter\s!userpage\c!numberstarter\!!es,
stopper = \!!bs\namedcounterparameter\s!userpage\c!numberstopper\!!es,
+ },{
+ viewerprefix = \!!bs\namedcounterparameter\s!userpage\c!viewerprefix\!!es,
+ state = \!!bs\namedcounterparameter\s!userpage\c!state\!!es,
}
)}}
@@ -457,4 +464,6 @@
\initializepagecounters
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-ref.lua b/Master/texmf-dist/tex/context/base/strc-ref.lua
index 284418c484c..0c8bb6e53e8 100644
--- a/Master/texmf-dist/tex/context/base/strc-ref.lua
+++ b/Master/texmf-dist/tex/context/base/strc-ref.lua
@@ -14,11 +14,11 @@ if not modules then modules = { } end modules ['strc-ref'] = {
-- todo: autoload components when :::
-local format, find, gmatch, match, concat = string.format, string.find, string.gmatch, string.match, table.concat
-local texcount, texsetcount = tex.count, tex.setcount
-local rawget, tonumber = rawget, tonumber
+local format, find, gmatch, match, strip = string.format, string.find, string.gmatch, string.match, string.strip
+local floor = math.floor
+local rawget, tonumber, type = rawget, tonumber, type
local lpegmatch = lpeg.match
-local copytable = table.copy
+local insert, remove, copytable = table.insert, table.remove, table.copy
local formatters = string.formatters
local allocate = utilities.storage.allocate
@@ -44,16 +44,18 @@ local report_importing = logs.reporter("references","importing")
local report_empty = logs.reporter("references","empty")
local variables = interfaces.variables
-local constants = interfaces.constants
-local context = context
-
local v_default = variables.default
local v_url = variables.url
local v_file = variables.file
local v_unknown = variables.unknown
-local v_yes = variables.yes
+local v_page = variables.page
+local v_auto = variables.auto
+
+local context = context
+local commands = commands
-local texcount = tex.count
+local texgetcount = tex.getcount
+local texsetcount = tex.setcount
local texconditionals = tex.conditionals
local productcomponent = resolvers.jobs.productcomponent
@@ -64,6 +66,7 @@ local logspushtarget = logs.pushtarget
local logspoptarget = logs.poptarget
local settings_to_array = utilities.parsers.settings_to_array
+local process_settings = utilities.parsers.process_stripped_settings
local unsetvalue = attributes.unsetvalue
local structures = structures
@@ -89,6 +92,9 @@ local tobesaved = allocate()
local collected = allocate()
local tobereferred = allocate()
local referred = allocate()
+local usedinternals = allocate()
+local flaginternals = allocate()
+local usedviews = allocate()
references.derived = derived
references.specials = specials
@@ -101,6 +107,9 @@ references.tobesaved = tobesaved
references.collected = collected
references.tobereferred = tobereferred
references.referred = referred
+references.usedinternals = usedinternals
+references.flaginternals = flaginternals
+references.usedviews = usedviews
local splitreference = references.splitreference
local splitprefix = references.splitcomponent -- replaces: references.splitprefix
@@ -109,6 +118,22 @@ local componentsplitter = references.componentsplitter
local currentreference = nil
+local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
+local context_delayed = context.delayed
+
+local ctx_pushcatcodes = context.pushcatcodes
+local ctx_popcatcodes = context.popcatcodes
+local ctx_dofinishsomereference = context.dofinishsomereference
+local ctx_dofromurldescription = context.dofromurldescription
+local ctx_dofromurlliteral = context.dofromurlliteral
+local ctx_dofromfiledescription = context.dofromfiledescription
+local ctx_dofromfileliteral = context.dofromfileliteral
+local ctx_expandreferenceoperation = context.expandreferenceoperation
+local ctx_expandreferencearguments = context.expandreferencearguments
+local ctx_getreferencestructureprefix = context.getreferencestructureprefix
+local ctx_convertnumber = context.convertnumber
+local ctx_emptyreference = context.emptyreference
+
storage.register("structures/references/defined", references.defined, "structures.references.defined")
local initializers = { }
@@ -117,6 +142,7 @@ local finalizers = { }
function references.registerinitializer(func) -- we could use a token register instead
initializers[#initializers+1] = func
end
+
function references.registerfinalizer(func) -- we could use a token register instead
finalizers[#finalizers+1] = func
end
@@ -127,12 +153,32 @@ local function initializer() -- can we use a tobesaved as metatable for collecte
for i=1,#initializers do
initializers[i](tobesaved,collected)
end
+ for prefix, list in next, collected do
+ for tag, data in next, list do
+ local r = data.references
+ local i = r.internal
+ if i then
+ internals[i] = c
+ usedinternals[i] = r.used
+ end
+ end
+ end
end
local function finalizer()
for i=1,#finalizers do
finalizers[i](tobesaved)
end
+ for prefix, list in next, tobesaved do
+ for tag, data in next, list do
+ local r = data.references
+ local i = r.internal
+ local f = flaginternals[i]
+ if f then
+ r.used = usedviews[i] or true
+ end
+ end
+ end
end
job.register('structures.references.collected', tobesaved, initializer, finalizer)
@@ -140,18 +186,44 @@ job.register('structures.references.collected', tobesaved, initializer, finalize
local maxreferred = 1
local nofreferred = 0
--- local function initializer() -- can we use a tobesaved as metatable for collected?
--- tobereferred = references.tobereferred
--- referred = references.referred
--- nofreferred = #referred
--- end
-
local function initializer() -- can we use a tobesaved as metatable for collected?
tobereferred = references.tobereferred
referred = references.referred
- setmetatableindex(referred,get) -- hm, what is get ?
+ nofreferred = #referred
end
+-- no longer done this way
+
+-- references.resolvers = references.resolvers or { }
+-- local resolvers = references.resolvers
+--
+-- function resolvers.section(var)
+-- local vi = lists.collected[var.i[2]]
+-- if vi then
+-- var.i = vi
+-- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
+-- else
+-- var.i = nil
+-- var.r = 1
+-- end
+-- end
+--
+-- resolvers.float = resolvers.section
+-- resolvers.description = resolvers.section
+-- resolvers.formula = resolvers.section
+-- resolvers.note = resolvers.section
+--
+-- function resolvers.reference(var)
+-- local vi = var.i[2]
+-- if vi then
+-- var.i = vi
+-- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
+-- else
+-- var.i = nil
+-- var.r = 1
+-- end
+-- end
+
-- We make the array sparse (maybe a finalizer should optionally return a table) because
-- there can be quite some page links involved. We only store one action number per page
-- which is normally good enough for what we want (e.g. see above/below) and we do
@@ -216,7 +288,7 @@ local function referredpage(n)
end
end
-- fallback
- return texcount.realpageno
+ return texgetcount("realpageno")
end
references.referredpage = referredpage
@@ -226,7 +298,7 @@ function references.registerpage(n) -- called in the backend code
if n > maxreferred then
maxreferred = n
end
- tobereferred[n] = texcount.realpageno
+ tobereferred[n] = texgetcount("realpageno")
end
end
@@ -248,16 +320,15 @@ local function setnextorder(kind,name)
texsetcount("global","locationorder",lastorder)
end
-references.setnextorder = setnextorder
-function references.setnextinternal(kind,name)
+local function setnextinternal(kind,name)
setnextorder(kind,name) -- always incremented with internal
- local n = texcount.locationcount + 1
+ local n = texgetcount("locationcount") + 1
texsetcount("global","locationcount",n)
return n
end
-function references.currentorder(kind,name)
+local function currentorder(kind,name)
return orders[kind] and orders[kind][name] or lastorder
end
@@ -268,20 +339,27 @@ local function setcomponent(data)
local references = data and data.references
if references then
references.component = component
+ if references.referenceprefix == component then
+ references.referenceprefix = nil
+ end
end
return component
end
-- but for the moment we do it here (experiment)
end
-commands.setnextinternalreference = references.setnextinternal
+references.setnextorder = setnextorder
+references.setnextinternal = setnextinternal
+references.currentorder = currentorder
+references.setcomponent = setcomponent
+
+commands.setnextreferenceorder = setnextorder
+commands.setnextinternalreference = setnextinternal
function commands.currentreferenceorder(kind,name)
- context(references.currentorder(kind,name))
+ context(currentorder(kind,name))
end
-references.setcomponent = setcomponent
-
function references.set(kind,prefix,tag,data)
-- setcomponent(data)
local pd = tobesaved[prefix] -- nicer is a metatable
@@ -290,28 +368,29 @@ function references.set(kind,prefix,tag,data)
tobesaved[prefix] = pd
end
local n = 0
- for ref in gmatch(tag,"[^,]+") do
- if ref ~= "" then
- if check_duplicates and pd[ref] then
- if prefix and prefix ~= "" then
- report_references("redundant reference %a in namespace %a",ref,prefix)
- else
- report_references("redundant reference %a",ref)
- end
+ local function action(ref)
+ if ref == "" then
+ -- skip
+ elseif check_duplicates and pd[ref] then
+ if prefix and prefix ~= "" then
+ report_references("redundant reference %a in namespace %a",ref,prefix)
else
- n = n + 1
- pd[ref] = data
- context.dofinishsomereference(kind,prefix,ref)
+ report_references("redundant reference %a",ref)
end
+ else
+ n = n + 1
+ pd[ref] = data
+ ctx_dofinishsomereference(kind,prefix,ref)
end
end
+ process_settings(tag,action)
return n > 0
end
function references.enhance(prefix,tag)
local l = tobesaved[prefix][tag]
if l then
- l.references.realpage = texcount.realpageno
+ l.references.realpage = texgetcount("realpageno")
end
end
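
In references.set above the gmatch loop is replaced by a callback handed to
utilities.parsers.process_stripped_settings. Assuming that helper does what its name
suggests (split the comma list, strip surrounding spaces, call the action once per item),
the old and new variants are roughly equivalent to this standalone loop:

    -- illustrative stand-in, not the actual utilities.parsers implementation
    local function process_stripped_settings(str,action)
        for item in string.gmatch(str,"[^,]+") do
            action(item:match("^%s*(.-)%s*$"))   -- strip leading/trailing spaces
        end
    end

    process_stripped_settings("eq:one, eq:two",print)
    -- eq:one
    -- eq:two
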
@@ -319,118 +398,85 @@ commands.enhancereference = references.enhance
-- -- -- related to strc-ini.lua -- -- --
-references.resolvers = references.resolvers or { }
-local resolvers = references.resolvers
-
-local function getfromlist(var)
- local vi = var.i
- if vi then
- vi = vi[3] or lists.collected[vi[2]]
- if vi then
- local r = vi.references and vi.references
- if r then
- r = r.realpage
- end
- if not r then
- r = vi.pagedata and vi.pagedata
- if r then
- r = r.realpage
- end
- end
- var.i = vi
- var.r = r or 1
- else
- var.i = nil
- var.r = 1
- end
- else
- var.i = nil
- var.r = 1
- end
-end
-
--- resolvers.section = getfromlist
--- resolvers.float = getfromlist
--- resolvers.description = getfromlist
--- resolvers.formula = getfromlist
--- resolvers.note = getfromlist
-
-setmetatableindex(resolvers,function(t,k)
- local v = getfromlist
- resolvers[k] = v
- return v
-end)
-
-function resolvers.reference(var)
- local vi = var.i[2] -- check
- if vi then
- var.i = vi
- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
- else
- var.i = nil
- var.r = 1
- end
-end
+-- no metatable here .. better be sparse
local function register_from_lists(collected,derived,pages,sections)
- local g = derived[""] if not g then g = { } derived[""] = g end -- global
+ local derived_g = derived[""] -- global
+ if not derived_g then
+ derived_g = { }
+ derived[""] = derived_g
+ end
for i=1,#collected do
- local entry = collected[i]
- local m, r = entry.metadata, entry.references
- if m and r then
- local reference = r.reference or ""
- local prefix = r.referenceprefix or ""
- local component = r.component and r.component or ""
- if reference ~= "" then
- local kind, realpage = m.kind, r.realpage
- if kind and realpage then
- local d = derived[prefix]
- if not d then
- d = { }
- derived[prefix] = d
- end
- local c = derived[component]
- if not c then
- c = { }
- derived[component] = c
- end
- local t = { kind, i, entry }
- for s in gmatch(reference,"%s*([^,]+)") do
- if trace_referencing then
- report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
+ local entry = collected[i]
+ local metadata = entry.metadata
+ if metadata then
+ local kind = metadata.kind
+ if kind then
+ local references = entry.references
+ if references then
+ local reference = references.reference
+ if reference and reference ~= "" then
+ local realpage = references.realpage
+ if realpage then
+ local prefix = references.referenceprefix
+ local component = references.component
+ local derived_p = nil
+ local derived_c = nil
+ if prefix and prefix ~= "" then
+ derived_p = derived[prefix]
+ if not derived_p then
+ derived_p = { }
+ derived[prefix] = derived_p
+ end
+ end
+ if component and component ~= "" and component ~= prefix then
+ derived_c = derived[component]
+ if not derived_c then
+ derived_c = { }
+ derived[component] = derived_c
+ end
+ end
+ local function action(s)
+ if trace_referencing then
+ report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
+ end
+ if derived_p and not derived_p[s] then
+ derived_p[s] = entry
+ end
+ if derived_c and not derived_c[s] then
+ derived_c[s] = entry
+ end
+ if not derived_g[s] then
+ derived_g[s] = entry -- first wins
+ end
+ end
+ process_settings(reference,action)
end
- c[s] = c[s] or t -- share them
- d[s] = d[s] or t -- share them
- g[s] = g[s] or t -- first wins
end
end
end
end
end
--- inspect(derived)
+ -- inspect(derived)
end
references.registerinitializer(function() register_from_lists(lists.collected,derived) end)
-- urls
-references.urls = references.urls or { }
-references.urls.data = references.urls.data or { }
-
-local urls = references.urls.data
+local urls = references.urls or { }
+references.urls = urls
+local urldata = urls.data or { }
+urls.data = urldata
-function references.urls.define(name,url,file,description)
+function urls.define(name,url,file,description)
if name and name ~= "" then
- urls[name] = { url or "", file or "", description or url or file or ""}
+ urldata[name] = { url or "", file or "", description or url or file or ""}
end
end
-local pushcatcodes = context.pushcatcodes
-local popcatcodes = context.popcatcodes
-local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
-
-function references.urls.get(name)
- local u = urls[name]
+function urls.get(name)
+ local u = urldata[name]
if u then
local url, file = u[1], u[2]
if file and file ~= "" then
@@ -442,58 +488,58 @@ function references.urls.get(name)
end
function commands.geturl(name)
- local url = references.urls.get(name)
+ local url = urls.get(name)
if url and url ~= "" then
- pushcatcodes(txtcatcodes)
+ ctx_pushcatcodes(txtcatcodes)
context(url)
- popcatcodes()
+ ctx_popcatcodes()
end
end
-- function commands.gethyphenatedurl(name,...)
--- local url = references.urls.get(name)
+-- local url = urls.get(name)
-- if url and url ~= "" then
-- hyphenatedurl(url,...)
-- end
-- end
function commands.doifurldefinedelse(name)
- commands.doifelse(urls[name])
+ commands.doifelse(urldata[name])
end
-commands.useurl= references.urls.define
+commands.useurl= urls.define
-- files
-references.files = references.files or { }
-references.files.data = references.files.data or { }
-
-local files = references.files.data
+local files = references.files or { }
+references.files = files
+local filedata = files.data or { }
+files.data = filedata
-function references.files.define(name,file,description)
+function files.define(name,file,description)
if name and name ~= "" then
- files[name] = { file or "", description or file or "" }
+ filedata[name] = { file or "", description or file or "" }
end
end
-function references.files.get(name,method,space) -- method: none, before, after, both, space: yes/no
- local f = files[name]
+function files.get(name,method,space) -- method: none, before, after, both, space: yes/no
+ local f = filedata[name]
if f then
context(f[1])
end
end
function commands.doiffiledefinedelse(name)
- commands.doifelse(files[name])
+ commands.doifelse(filedata[name])
end
-commands.usefile= references.files.define
+commands.usefile= files.define
-- helpers
function references.checkedfile(whatever) -- return whatever if not resolved
if whatever then
- local w = files[whatever]
+ local w = filedata[whatever]
if w then
return w[1]
else
@@ -504,7 +550,7 @@ end
function references.checkedurl(whatever) -- return whatever if not resolved
if whatever then
- local w = urls[whatever]
+ local w = urldata[whatever]
if w then
local u, f = w[1], w[2]
if f and f ~= "" then
@@ -520,11 +566,11 @@ end
function references.checkedfileorurl(whatever,default) -- return nil, nil if not resolved
if whatever then
- local w = files[whatever]
+ local w = filedata[whatever]
if w then
return w[1], nil
else
- local w = urls[whatever]
+ local w = urldata[whatever]
if w then
local u, f = w[1], w[2]
if f and f ~= "" then
@@ -540,25 +586,25 @@ end
-- programs
-references.programs = references.programs or { }
-references.programs.data = references.programs.data or { }
-
-local programs = references.programs.data
+local programs = references.programs or { }
+references.programs = programs
+local programdata = programs.data or { }
+programs.data = programdata
-function references.programs.define(name,file,description)
+function programs.define(name,file,description)
if name and name ~= "" then
- programs[name] = { file or "", description or file or ""}
+ programdata[name] = { file or "", description or file or ""}
end
end
-function references.programs.get(name)
- local f = programs[name]
+function programs.get(name)
+ local f = programdata[name]
return f and f[1]
end
function references.checkedprogram(whatever) -- return whatever if not resolved
if whatever then
- local w = programs[whatever]
+ local w = programdata[whatever]
if w then
return w[1]
else
@@ -567,10 +613,10 @@ function references.checkedprogram(whatever) -- return whatever if not resolved
end
end
-commands.defineprogram = references.programs.define
+commands.defineprogram = programs.define
function commands.getprogram(name)
- local f = programs[name]
+ local f = programdata[name]
if f then
context(f[1])
end
@@ -579,11 +625,11 @@ end
-- shared by urls and files
function references.whatfrom(name)
- context((urls[name] and v_url) or (files[name] and v_file) or v_unknown)
+ context((urldata[name] and v_url) or (filedata[name] and v_file) or v_unknown)
end
function references.from(name)
- local u = urls[name]
+ local u = urldata[name]
if u then
local url, file, description = u[1], u[2], u[3]
if description ~= "" then
@@ -595,7 +641,7 @@ function references.from(name)
return url
end
else
- local f = files[name]
+ local f = filedata[name]
if f then
local file, description = f[1], f[2]
if description ~= "" then
@@ -608,25 +654,25 @@ function references.from(name)
end
function commands.from(name)
- local u = urls[name]
+ local u = urldata[name]
if u then
local url, file, description = u[1], u[2], u[3]
if description ~= "" then
- context.dofromurldescription(description)
+ ctx_dofromurldescription(description)
-- ok
elseif file and file ~= "" then
- context.dofromurlliteral(url .. "/" .. file)
+ ctx_dofromurlliteral(url .. "/" .. file)
else
- context.dofromurlliteral(url)
+ ctx_dofromurlliteral(url)
end
else
- local f = files[name]
+ local f = filedata[name]
if f then
local file, description = f[1], f[2]
if description ~= "" then
- context.dofromfiledescription(description)
+ ctx_dofromfiledescription(description)
else
- context.dofromfileliteral(file)
+ ctx_dofromfileliteral(file)
end
end
end
@@ -634,7 +680,7 @@ end
function references.define(prefix,reference,list)
local d = defined[prefix] if not d then d = { } defined[prefix] = d end
- d[reference] = { "defined", list }
+ d[reference] = list
end
function references.reset(prefix,reference)
@@ -655,33 +701,92 @@ commands.resetreference = references.reset
-- to what extent do we check the non prefixed variant
-local strict = false
+-- local strict = false
+--
+-- local function resolve(prefix,reference,args,set) -- we start with prefix,reference
+-- if reference and reference ~= "" then
+-- if not set then
+-- set = { prefix = prefix, reference = reference }
+-- else
+-- if not set.reference then set.reference = reference end
+-- if not set.prefix then set.prefix = prefix end
+-- end
+-- local r = settings_to_array(reference)
+-- for i=1,#r do
+-- local ri = r[i]
+-- local d
+-- if strict then
+-- d = defined[prefix] or defined[""]
+-- d = d and d[ri]
+-- else
+-- d = defined[prefix]
+-- d = d and d[ri]
+-- if not d then
+-- d = defined[""]
+-- d = d and d[ri]
+-- end
+-- end
+-- if d then
+-- resolve(prefix,d,nil,set)
+-- else
+-- local var = splitreference(ri)
+-- if var then
+-- var.reference = ri
+-- local vo, vi = var.outer, var.inner
+-- if not vo and vi then
+-- -- to be checked
+-- if strict then
+-- d = defined[prefix] or defined[""]
+-- d = d and d[vi]
+-- else
+-- d = defined[prefix]
+-- d = d and d[vi]
+-- if not d then
+-- d = defined[""]
+-- d = d and d[vi]
+-- end
+-- end
+-- --
+-- if d then
+-- resolve(prefix,d,var.arguments,set) -- args can be nil
+-- else
+-- if args then var.arguments = args end
+-- set[#set+1] = var
+-- end
+-- else
+-- if args then var.arguments = args end
+-- set[#set+1] = var
+-- end
+-- if var.has_tex then
+-- set.has_tex = true
+-- end
+-- else
+-- -- report_references("funny pattern %a",ri)
+-- end
+-- end
+-- end
+-- return set
+-- else
+-- return { }
+-- end
+-- end
+
+setmetatableindex(defined,"table")
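
The `setmetatableindex(defined,"table")` call is what lets the rewritten resolver write `defined[prefix][ri] or defined[""][ri]` without the old `strict` branches: an unknown prefix gets an empty bucket on first access instead of raising an error. A minimal self-contained stand-in for that helper (the real `table.setmetatableindex` supports more modes):

    -- auto-create a sub table the first time an unknown key is indexed
    local function setmetatableindex(t)
        return setmetatable(t, {
            __index = function(t,k)
                local v = { }
                rawset(t,k,v)
                return v
            end
        })
    end

    local defined = setmetatableindex { }

    defined["chap"]["intro"] = "in:first-chapter"

    -- both lookups are safe, even for prefixes that were never defined
    print(defined["chap"]["intro"])   -- in:first-chapter
    print(defined["part"]["intro"])   -- nil (no error: "part" was created on the fly)
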
local function resolve(prefix,reference,args,set) -- we start with prefix,reference
if reference and reference ~= "" then
if not set then
set = { prefix = prefix, reference = reference }
else
- set.reference = set.reference or reference
- set.prefix = set.prefix or prefix
+ if not set.reference then set.reference = reference end
+ if not set.prefix then set.prefix = prefix end
end
local r = settings_to_array(reference)
for i=1,#r do
local ri = r[i]
- local d
- if strict then
- d = defined[prefix] or defined[""]
- d = d and d[ri]
- else
- d = defined[prefix]
- d = d and d[ri]
- if not d then
- d = defined[""]
- d = d and d[ri]
- end
- end
+ local d = defined[prefix][ri] or defined[""][ri]
if d then
- resolve(prefix,d[2],nil,set)
+ resolve(prefix,d,nil,set)
else
local var = splitreference(ri)
if var then
@@ -689,20 +794,10 @@ local function resolve(prefix,reference,args,set) -- we start with prefix,refere
local vo, vi = var.outer, var.inner
if not vo and vi then
-- to be checked
- if strict then
- d = defined[prefix] or defined[""]
- d = d and d[vi]
- else
- d = defined[prefix]
- d = d and d[vi]
- if not d then
- d = defined[""]
- d = d and d[vi]
- end
- end
+ d = defined[prefix][vi] or defined[""][vi]
--
if d then
- resolve(prefix,d[2],var.arguments,set) -- args can be nil
+ resolve(prefix,d,var.arguments,set) -- args can be nil
else
if args then var.arguments = args end
set[#set+1] = var
@@ -737,21 +832,18 @@ function commands.setreferencearguments(k,v)
references.currentset[k].arguments = v
end
-local expandreferenceoperation = context.expandreferenceoperation
-local expandreferencearguments = context.expandreferencearguments
-
function references.expandcurrent() -- todo: two booleans: o_has_tex& a_has_tex
local currentset = references.currentset
if currentset and currentset.has_tex then
for i=1,#currentset do
local ci = currentset[i]
local operation = ci.operation
- if operation and find(operation,"\\") then -- if o_has_tex then
- expandreferenceoperation(i,operation)
+ if operation and find(operation,"\\",1,true) then -- if o_has_tex then
+ ctx_expandreferenceoperation(i,operation)
end
local arguments = ci.arguments
- if arguments and find(arguments,"\\") then -- if a_has_tex then
- expandreferencearguments(i,arguments)
+ if arguments and find(arguments,"\\",1,true) then -- if a_has_tex then
+ ctx_expandreferencearguments(i,arguments)
end
end
end
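
Adding `1,true` to `find` switches to a plain substring search: the backslash that signals embedded TeX is looked up directly, without going through the pattern matcher at all (slightly faster and nothing to escape). For example:

    local s = "see \\in{section}[whatever]"

    -- plain find: no pattern compilation, the backslash is just a byte
    print(string.find(s,"\\",1,true))   -- 5   5
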
@@ -807,13 +899,21 @@ local function loadexternalreferences(name,utilitydata)
target = { }
external[prefix] = target
end
- for s in gmatch(reference,"%s*([^,]+)") do
+ -- for s in gmatch(reference,"%s*([^,]+)") do
+ -- if trace_importing then
+ -- report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
+ -- "external",kind,name,prefix,s)
+ -- end
+ -- target[s] = target[s] or entry
+ -- end
+ local function action(s)
if trace_importing then
report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
"external",kind,name,prefix,s)
end
target[s] = target[s] or entry
end
+ process_settings(reference,action)
end
end
end
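
Both this loop and the product/component loop below now hand an `action` callback to `process_settings` instead of running their own `gmatch` over the comma separated reference string. A simplified stand-in that behaves like the replaced loop (the real helper presumably also handles braced values):

    -- call `action` once for every item in a comma separated settings string
    local function process_settings(settings,action)
        for s in string.gmatch(settings,"%s*([^,]+)") do
            action(s)
        end
    end

    local target = { }

    local function action(s)
        target[s] = target[s] or { registered = s }    -- first wins, as in the importer
    end

    process_settings("intro,body,appendix",action)

    for k in pairs(target) do
        print(k)    -- intro, body, appendix (in no particular order)
    end
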
@@ -825,8 +925,8 @@ end
local externalfiles = { }
-table.setmetatableindex(externalfiles, function(t,k)
- local v = files[k]
+setmetatableindex(externalfiles, function(t,k)
+ local v = filedata[k]
if not v then
v = { k, k }
end
@@ -834,7 +934,7 @@ table.setmetatableindex(externalfiles, function(t,k)
return v
end)
-table.setmetatableindex(externals,function(t,k) -- either or not automatically
+setmetatableindex(externals, function(t,k) -- either or not automatically
local filename = externalfiles[k][1] -- filename
local fullname = file.replacesuffix(filename,"tuc")
if lfs.isfile(fullname) then -- todo: use other locator
@@ -921,7 +1021,7 @@ local function loadproductreferences(productname,componentname,utilitydata)
ptarget = { }
productreferences[prefix] = ptarget
end
- for s in gmatch(reference,"%s*([^,]+)") do
+ local function action(s)
if ptarget then
if trace_importing then
report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
@@ -937,6 +1037,7 @@ local function loadproductreferences(productname,componentname,utilitydata)
ctarget[s] = ctarget[s] or entry
end
end
+ process_settings(reference,action)
end
end
end
@@ -1014,7 +1115,7 @@ references.registerinitializer(function(tobesaved,collected)
productdata.components = componentlist(job.structure.collected) or { }
end)
-function structures.references.loadpresets(product,component) -- we can consider a special components hash
+function references.loadpresets(product,component) -- we can consider a special components hash
if product and component and product~= "" and component ~= "" and not productdata.product then -- maybe: productdata.filename ~= filename
productdata.product = product
productdata.component = component
@@ -1034,7 +1135,7 @@ function structures.references.loadpresets(product,component) -- we can consider
end
end
-structures.references.productdata = productdata
+references.productdata = productdata
local useproduct = commands.useproduct
@@ -1048,7 +1149,7 @@ if useproduct then
if trace_referencing or trace_importing then
report_references("loading presets for component %a of product %a",component,product)
end
- structures.references.loadpresets(product,component)
+ references.loadpresets(product,component)
end
end
end
@@ -1146,7 +1247,7 @@ local function identify_arguments(set,var,i)
local s = specials[var.inner]
if s then
-- inner{argument}
- var.kind = "special with arguments"
+ var.kind = "special operation with arguments"
else
var.error = "unknown inner or special"
end
@@ -1156,114 +1257,105 @@ local function identify_arguments(set,var,i)
return var
end
-local function identify_inner(set,var,prefix,collected,derived,tobesaved)
+-- needs checking: we might now be doing too much (redundant) checking
+-- inner ... we could move the prefix logic into the parser so that we have 'm for each entry
+-- foo:bar -> foo == prefix (first we try the global one)
+-- -:bar -> ignore prefix
+
+local function finish_inner(var,p,i)
+ var.kind = "inner"
+ var.i = i
+ var.p = p
+ var.r = (i.references and i.references.realpage) or (i.pagedata and i.pagedata.realpage) or 1
+ return var
+end
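
The comment above spells out the prefix convention that `prefixsplitter` implements and that `identify_inner` then follows: `foo:bar` first tries the `foo` bucket, while `-:bar` forces the unprefixed (global) bucket. A plain Lua approximation of that split (the real splitter is an lpeg pattern, and the outer `::` form is handled elsewhere):

    -- split "prefix:inner" into its two parts; a plain inner yields nil
    local function splitprefix(inner)
        return string.match(inner,"^([^:]+):(.+)$")
    end

    print(splitprefix("foo:bar"))   -- foo  bar  (look in the "foo" bucket first)
    print(splitprefix("-:bar"))     -- -    bar  (force the global "" bucket)
    print(splitprefix("bar"))       -- nil       (no prefix at all)
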
+
+local function identify_inner(set,var,prefix,collected,derived)
local inner = var.inner
- local outer = var.outer
- -- inner ... we could move the prefix logic into the parser so that we have 'm for each entry
- -- foo:bar -> foo == prefix (first we try the global one)
- -- -:bar -> ignore prefix
- local p, i = prefix, nil
- local splitprefix, splitinner
-- the next test is a safeguard when references are auto loaded from outer
- if inner then
- splitprefix, splitinner = lpegmatch(prefixsplitter,inner)
+ if not inner or inner == "" then
+ return false
end
- -- these are taken from other anonymous references
+ local splitprefix, splitinner = lpegmatch(prefixsplitter,inner)
if splitprefix and splitinner then
+ -- we check for a prefix:reference instance in the regular set of collected
+ -- references; a special case is -: which forces a lookup in the global list
if splitprefix == "-" then
- i = collected[""]
- i = i and i[splitinner]
- if i then
- p = ""
- end
- else
- i = collected[splitprefix]
- i = i and i[splitinner]
+ local i = collected[""]
if i then
- p = splitprefix
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,"",i)
+ end
end
end
- end
- -- todo: strict here
- if not i then
- i = collected[prefix]
- i = i and i[inner]
- if i then
- p = prefix
- end
- end
- if not i and prefix ~= "" then
- i = collected[""]
- i = i and i[inner]
+ local i = collected[splitprefix]
if i then
- p = ""
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,splitprefix,i)
+ end
end
- end
- if i then
- var.i = { "reference", i }
- resolvers.reference(var)
- var.kind = "inner"
- var.p = p
- elseif derived then
- -- these are taken from other data structures (like lists)
- if splitprefix and splitinner then
+ if derived then
+ -- next we look for a reference in the regular set of collected references
+ -- using the prefix that is active at this moment (so we overload the given one)
+ -- these are taken from other data structures (like lists)
if splitprefix == "-" then
- i = derived[""]
- i = i and i[splitinner]
+ local i = derived[""]
if i then
- p = ""
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,"",i)
+ end
end
- else
- i = derived[splitprefix]
- i = i and i[splitinner]
+ end
+ local i = derived[splitprefix]
+ if i then
+ i = i[splitinner]
if i then
- p = splitprefix
+ return finish_inner(var,splitprefix,i)
end
end
end
- if not i then
- i = derived[prefix]
- i = i and i[inner]
- if i then
- p = prefix
- end
+ end
+ -- we now ignore the split prefix and treat the whole inner as a potential
+ -- reference into the global list
+ local i = collected[prefix]
+ if i then
+ i = i[inner]
+ if i then
+ return finish_inner(var,prefix,i)
end
- if not i and prefix ~= "" then
- i = derived[""]
- i = i and i[inner]
+ end
+ if not i and derived then
+ -- and if not found we look in the derived references
+ local i = derived[prefix]
+ if i then
+ i = i[inner]
if i then
- p = ""
+ return finish_inner(var,prefix,i)
end
end
+ end
+ return false
+end
+
+local function unprefixed_inner(set,var,prefix,collected,derived,tobesaved)
+ local inner = var.inner
+ local s = specials[inner]
+ if s then
+ var.kind = "special"
+ else
+ local i = (collected and collected[""] and collected[""][inner]) or
+ (derived and derived [""] and derived [""][inner]) or
+ (tobesaved and tobesaved[""] and tobesaved[""][inner])
if i then
var.kind = "inner"
- var.i = i
- var.p = p
- local ri = resolvers[i[1]]
- if ri then
- ri(var)
- else
- -- can't happen as we catch it with a metatable now
- report_references("unknown inner resolver for %a",i[1])
- end
+ var.p = ""
+ var.i = i
+ var.r = (i.references and i.references.realpage) or (i.pagedata and i.pagedata.realpage) or 1
else
- -- no prefixes here
- local s = specials[inner]
- if s then
- var.kind = "special"
- else
- i = (collected and collected[""] and collected[""][inner]) or
- (derived and derived [""] and derived [""][inner]) or
- (tobesaved and tobesaved[""] and tobesaved[""][inner])
- if i then
- var.kind = "inner"
- var.i = { "reference", i }
- resolvers.reference(var)
- var.p = ""
- else
- var.error = "unknown inner or special"
- end
- end
+ var.error = "unknown inner or special"
end
end
return var
@@ -1274,9 +1366,8 @@ local function identify_outer(set,var,i)
local inner = var.inner
local external = externals[outer]
if external then
- local v = copytable(var)
- v = identify_inner(set,v,nil,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,nil,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1284,9 +1375,8 @@ local function identify_outer(set,var,i)
end
return v
end
- v = copytable(var)
- local v = identify_inner(set,v,v.outer,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,var.outer,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1297,8 +1387,8 @@ local function identify_outer(set,var,i)
end
local external = productdata.componentreferences[outer]
if external then
- local v = identify_inner(set,copytable(var),nil,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,nil,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1325,6 +1415,8 @@ local function identify_outer(set,var,i)
local arguments = var.arguments
local operation = var.operation
if inner then
+ -- tricky: in this case we can only use views when we're sure that all inners
+ -- are flushed in the outer document so that should become an option
if arguments then
-- outer::inner{argument}
var.kind = "outer with inner with arguments"
@@ -1332,9 +1424,9 @@ local function identify_outer(set,var,i)
-- outer::inner
var.kind = "outer with inner"
end
- var.i = { "reference", inner }
- resolvers.reference(var)
+ var.i = inner
var.f = outer
+ var.r = (inner.references and inner.references.realpage) or (inner.pagedata and inner.pagedata.realpage) or 1
if trace_identifying then
report_identify_outer(set,var,i,"2e")
end
@@ -1371,46 +1463,62 @@ local function identify_outer(set,var,i)
return var
end
+-- todo: avoid copy
+
local function identify_inner_or_outer(set,var,i)
-- here we fall back on product data
local inner = var.inner
if inner and inner ~= "" then
- local v = identify_inner(set,copytable(var),set.prefix,collected,derived,tobesaved)
- if v.i and not v.error then
- v.kind = "inner" -- check this
+
+ -- first we look up in collected and derived using the current prefix
+
+ local prefix = set.prefix
+
+ local v = identify_inner(set,var,set.prefix,collected,derived)
+ if v then
if trace_identifying then
report_identify_outer(set,v,i,"4a")
end
return v
end
-local components = job.structure.components
+ -- next we look at each component (but we can omit the already consulted one)
-if components then
- for i=1,#components do
- local component = components[i]
- local data = collected[component]
- local vi = data and data[inner]
- if vi then
- var.outer = component
- var.i = vi
- var.kind = "outer with inner"
- set.external = true
+ local components = job.structure.components
+ if components then
+ for c=1,#components do
+ local component = components[c]
+ if component ~= prefix then
+ local v = identify_inner(set,var,component,collected,derived)
+ if v then
+ if trace_identifying then
+ report_identify_outer(set,var,i,"4b")
+ end
+ return v
+ end
+ end
+ end
+ end
+
+ -- as a last resort we will consult the global lists
+
+ local v = unprefixed_inner(set,var,"",collected,derived,tobesaved)
+ if v then
if trace_identifying then
- report_identify_outer(set,var,i,"4x")
+ report_identify_outer(set,v,i,"4c")
end
- return var
+ return v
end
- end
-end
+
+ -- now it gets bad ... we need to look in external files ... keep in mind that
+ -- we can best use explicit references for this ... we might issue a warning
local componentreferences = productdata.componentreferences
local productreferences = productdata.productreferences
local components = productdata.components
if components and componentreferences then
- -- for component, data in next, productdata.componentreferences do -- better do this in order of processing:
- for i=1,#components do
- local component = components[i]
+ for c=1,#components do
+ local component = components[c]
local data = componentreferences[component]
if data then
local d = data[""]
@@ -1421,7 +1529,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4b")
+ report_identify_outer(set,var,i,"4d")
end
return var
end
@@ -1441,7 +1549,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4c")
+ report_identify_outer(set,var,i,"4e")
end
return var
end
@@ -1456,7 +1564,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4d")
+ report_identify_outer(set,var,i,"4f")
end
return var
end
@@ -1467,30 +1575,18 @@ end
var.error = "no inner"
end
if trace_identifying then
- report_identify_outer(set,var,i,"4e")
+ report_identify_outer(set,var,i,"4g")
end
return var
end
--- local function identify_inner_or_outer(set,var,i)
--- -- we might consider first checking with a prefix prepended and then without
--- -- which is better for fig:oeps
--- local var = do_identify_inner_or_outer(set,var,i)
--- if var.error then
--- local prefix = set.prefix
--- if prefix and prefix ~= "" then
--- var.inner = prefix .. ':' .. var.inner
--- var.error = nil
--- return do_identify_inner_or_outer(set,var,i)
--- end
--- end
--- return var
--- end
-
local function identify_inner_component(set,var,i)
-- we're in a product (maybe ignore when same as component)
local component = var.component
- identify_inner(set,var,component,collected,derived,tobesaved)
+ local v = identify_inner(set,var,component,collected,derived)
+ if not v then
+ var.error = "unknown inner in component"
+ end
if trace_identifying then
report_identify_outer(set,var,i,"5a")
end
@@ -1547,7 +1643,7 @@ local function identify(prefix,reference)
end
local set = resolve(prefix,reference)
local bug = false
- texcount.referencehastexstate = set.has_tex and 1 or 0
+ texsetcount("referencehastexstate",set.has_tex and 1 or 0)
nofidentified = nofidentified + 1
set.n = nofidentified
for i=1,#set do
@@ -1626,80 +1722,100 @@ end
luatex.registerstopactions(references.reportproblems)
-local innermethod = "names"
+-- The auto method will try to avoid named internals in a clever way which
+-- can make files smaller without sacrificing external references. Some of
+-- the housekeeping happens the backend side.
+
+local innermethod = v_auto -- only page|auto now
+local defaultinnermethod = defaultinnermethod
+references.innermethod = innermethod -- don't mess with this one directly
function references.setinnermethod(m)
- if m then
- if m == "page" or m == "mixed" or m == "names" then
- innermethod = m
- elseif m == true or m == v_yes then
- innermethod = "page"
- end
+ if toboolean(m) or m == v_page then
+ innermethod = v_page
+ else
+ innermethod = v_auto
end
+ references.innermethod = innermethod
function references.setinnermethod()
report_references("inner method is already set and frozen to %a",innermethod)
end
end
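
`setinnermethod` freezes itself: the first call installs a replacement that only reports, so the inner method cannot be changed again later in the run. The same self-replacing setter in isolation:

    -- a setter that accepts exactly one call and then freezes
    local settings = { method = "auto" }

    function settings.setmethod(m)
        settings.method = m
        -- overwrite the setter so later calls cannot change the value again
        function settings.setmethod()
            print("method is already set and frozen to " .. settings.method)
        end
    end

    settings.setmethod("page")   -- takes effect
    settings.setmethod("auto")   -- only reports, method stays "page"
    print(settings.method)       -- page
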
function references.getinnermethod()
- return innermethod or "names"
+ return innermethod or defaultinnermethod
end
-directives.register("references.linkmethod", function(v) -- page mixed names
+directives.register("references.linkmethod", function(v) -- page auto
references.setinnermethod(v)
end)
-- this is inconsistent
-function references.setinternalreference(prefix,tag,internal,view) -- needs checking
- if innermethod == "page" then
- return unsetvalue
- else
+local destinationattributes = { }
+
+local function setinternalreference(prefix,tag,internal,view) -- needs checking
+ local destination = unsetvalue
+ if innermethod == v_auto then
local t, tn = { }, 0 -- maybe add to current
if tag then
if prefix and prefix ~= "" then
prefix = prefix .. ":" -- watch out, : here
- for ref in gmatch(tag,"[^,]+") do
+ local function action(ref)
tn = tn + 1
t[tn] = prefix .. ref
end
+ process_settings(tag,action)
else
- for ref in gmatch(tag,"[^,]+") do
+ local function action(ref)
tn = tn + 1
t[tn] = ref
end
+ process_settings(tag,action)
end
end
- if internal and innermethod == "names" then -- mixed or page
+ -- ugly .. later we decide to ignore it when we have a real one
+ -- but for testing we might want to see them all
+ if internal then
tn = tn + 1
- t[tn] = "aut:" .. internal
+ t[tn] = internal -- when number it's internal
end
- local destination = references.mark(t,nil,nil,view) -- returns an attribute
- texcount.lastdestinationattribute = destination
- return destination
+ destination = references.mark(t,nil,nil,view) -- returns an attribute
end
+ if internal then -- new
+ destinationattributes[internal] = destination
+ end
+ texsetcount("lastdestinationattribute",destination)
+ return destination
+end
+
+local function getinternalreference(internal)
+ return destinationattributes[internal] or 0
end
+references.setinternalreference = setinternalreference
+references.getinternalreference = getinternalreference
+commands.setinternalreference = setinternalreference
+commands.getinternalreference = getinternalreference
+
function references.setandgetattribute(kind,prefix,tag,data,view) -- maybe do internal automatically here
- local attr = references.set(kind,prefix,tag,data) and references.setinternalreference(prefix,tag,nil,view) or unsetvalue
- texcount.lastdestinationattribute = attr
+ local attr = references.set(kind,prefix,tag,data) and setinternalreference(prefix,tag,nil,view) or unsetvalue
+ texsetcount("lastdestinationattribute",attr)
return attr
end
commands.setreferenceattribute = references.setandgetattribute
-function references.getinternalreference(n) -- n points into list (todo: registers)
+function references.getinternallistreference(n) -- n points into list (todo: registers)
local l = lists.collected[n]
- return l and l.references.internal or n
-end
-
-function commands.setinternalreference(prefix,tag,internal,view) -- needs checking
- context(references.setinternalreference(prefix,tag,internal,view))
+ local i = l and l.references.internal
+ return i and destinationattributes[i] or 0
end
-function commands.getinternalreference(n) -- this will also be a texcount
+function commands.getinternallistreference(n) -- this will also be a texcount
local l = lists.collected[n]
- context(l and l.references.internal or n)
+ local i = l and l.references.internal
+ context(i and destinationattributes[i] or 0)
end
--
@@ -1731,10 +1847,22 @@ end
references.getcurrentprefixspec = getcurrentprefixspec
function commands.getcurrentprefixspec(default)
- context.getreferencestructureprefix(getcurrentprefixspec(default))
+ ctx_getreferencestructureprefix(getcurrentprefixspec(default))
end
-function references.filter(name,...) -- number page title ...
+local genericfilters = { }
+local userfilters = { }
+local textfilters = { }
+local fullfilters = { }
+local sectionfilters = { }
+
+filters.generic = genericfilters
+filters.user = userfilters
+filters.text = textfilters
+filters.full = fullfilters
+filters.section = sectionfilters
+
+local function filterreference(name,...) -- number page title ...
local data = currentreference and currentreference.i -- maybe we should take realpage from here
if data then
if name == "realpage" then
@@ -1743,8 +1871,8 @@ function references.filter(name,...) -- number page title ...
else -- assumes data is table
local kind = type(data) == "table" and data.metadata and data.metadata.kind
if kind then
- local filter = filters[kind] or filters.generic
- filter = filter and (filter[name] or filter.unknown or filters.generic[name] or filters.generic.unknown)
+ local filter = filters[kind] or genericfilters
+ filter = filter and (filter[name] or filter.unknown or genericfilters[name] or genericfilters.unknown)
if filter then
if trace_referencing then
report_references("name %a, kind %a, using dedicated filter",name,kind)
@@ -1764,18 +1892,24 @@ function references.filter(name,...) -- number page title ...
end
end
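
The lookup order inside `filterreference` is: the kind specific table, then its `unknown` handler, then the generic table, then the generic `unknown` handler. A self-contained sketch of that fallback chain:

    -- per-kind filter tables with a generic fallback, as in references.filter
    local genericfilters = {
        number  = function(data) print("generic number:",data.number) end,
        unknown = function(data) print("generic unknown filter") end,
    }

    local sectionfilters = {
        number  = function(data) print("section number:",data.number) end,
    }

    local filters = {
        generic = genericfilters,
        section = sectionfilters,
    }

    local function filterby(kind,name,data)
        local filter = filters[kind] or genericfilters
        filter = filter[name] or filter.unknown or genericfilters[name] or genericfilters.unknown
        if filter then
            filter(data)
        end
    end

    filterby("section","number",{ number = "1.2" })  -- kind specific filter wins
    filterby("float"  ,"number",{ number = "3"   })  -- unknown kind: generic filter
    filterby("section","title" ,{ })                 -- unknown name: generic unknown
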
-function references.filterdefault()
- return references.filter("default",getcurrentprefixspec(v_default))
+local function filterreferencedefault()
+ return filterreference("default",getcurrentprefixspec(v_default))
end
+references.filter = filterreference
+references.filterdefault = filterreferencedefault
+
+commands.filterreference = filterreference
+commands.filterdefaultreference = filterreferencedefault
+
function commands.currentreferencedefault(tag)
- if not tag then tag = "default" end
- references.filter(tag,context.delayed(getcurrentprefixspec(tag)))
+ if not tag then
+ tag = "default"
+ end
+ filterreference(tag,context_delayed(getcurrentprefixspec(tag)))
end
-filters.generic = { }
-
-function filters.generic.title(data)
+function genericfilters.title(data)
if data then
local titledata = data.titledata or data.useddata
if titledata then
@@ -1784,7 +1918,7 @@ function filters.generic.title(data)
end
end
-function filters.generic.text(data)
+function genericfilters.text(data)
if data then
local entries = data.entries or data.useddata
if entries then
@@ -1793,7 +1927,7 @@ function filters.generic.text(data)
end
end
-function filters.generic.number(data,what,prefixspec) -- todo: spec and then no stopper
+function genericfilters.number(data,what,prefixspec) -- todo: spec and then no stopper
if data then
numberdata = lists.reordered(data) -- data.numberdata
if numberdata then
@@ -1801,23 +1935,23 @@ function filters.generic.number(data,what,prefixspec) -- todo: spec and then no
sections.typesetnumber(numberdata,"number",numberdata)
else
local useddata = data.useddata
- if useddata and useddsta.number then
+ if useddata and useddata.number then
context(useddata.number)
end
end
end
end
-filters.generic.default = filters.generic.text
+genericfilters.default = genericfilters.text
-function filters.generic.page(data,prefixspec,pagespec)
+function genericfilters.page(data,prefixspec,pagespec)
local pagedata = data.pagedata
if pagedata then
local number, conversion = pagedata.number, pagedata.conversion
if not number then
-- error
elseif conversion then
- context.convertnumber(conversion,number)
+ ctx_convertnumber(conversion,number)
else
context(number)
end
@@ -1826,14 +1960,12 @@ function filters.generic.page(data,prefixspec,pagespec)
end
end
-filters.user = { }
-
-function filters.user.unknown(data,name)
+function userfilters.unknown(data,name)
if data then
local userdata = data.userdata
local userkind = userdata and userdata.kind
if userkind then
- local filter = filters[userkind] or filters.generic
+ local filter = filters[userkind] or genericfilters
filter = filter and (filter[name] or filter.unknown)
if filter then
filter(data,name)
@@ -1847,9 +1979,7 @@ function filters.user.unknown(data,name)
end
end
-filters.text = { }
-
-function filters.text.title(data)
+function textfilters.title(data)
helpers.title(data.entries.text or "?",data.metadata)
end
@@ -1859,18 +1989,14 @@ end
-- helpers.title(data.entries.text or "?",data.metadata)
-- end
-function filters.text.page(data,prefixspec,pagespec)
+function textfilters.page(data,prefixspec,pagespec)
helpers.prefixpage(data,prefixspec,pagespec)
end
-filters.full = { }
+fullfilters.title = textfilters.title
+fullfilters.page = textfilters.page
-filters.full.title = filters.text.title
-filters.full.page = filters.text.page
-
-filters.section = { }
-
-function filters.section.number(data,what,prefixspec)
+function sectionfilters.number(data,what,prefixspec)
if data then
local numberdata = data.numberdata
if not numberdata then
@@ -1882,7 +2008,7 @@ function filters.section.number(data,what,prefixspec)
local references = data.references
if trace_empty then
report_empty("reference %a has a hidden number",references.reference)
- context.emptyreference() -- maybe an option
+ ctx_emptyreference() -- maybe an option
end
else
sections.typesetnumber(numberdata,"number",prefixspec,numberdata)
@@ -1890,18 +2016,18 @@ function filters.section.number(data,what,prefixspec)
end
end
-filters.section.title = filters.generic.title
-filters.section.page = filters.generic.page
-filters.section.default = filters.section.number
+sectionfilters.title = genericfilters.title
+sectionfilters.page = genericfilters.page
+sectionfilters.default = sectionfilters.number
--- filters.note = { default = filters.generic.number }
--- filters.formula = { default = filters.generic.number }
--- filters.float = { default = filters.generic.number }
--- filters.description = { default = filters.generic.number }
--- filters.item = { default = filters.generic.number }
+-- filters.note = { default = genericfilters.number }
+-- filters.formula = { default = genericfilters.number }
+-- filters.float = { default = genericfilters.number }
+-- filters.description = { default = genericfilters.number }
+-- filters.item = { default = genericfilters.number }
setmetatableindex(filters, function(t,k) -- beware, test with rawget
- local v = { default = filters.generic.number } -- not copy as it might be extended differently
+ local v = { default = genericfilters.number } -- not copy as it might be extended differently
t[k] = v
return v
end)
@@ -1931,7 +2057,8 @@ local specials = references.testspecials
-- real page to determine if we need contrastlocation as that is more lightweight.
local function checkedpagestate(n,page)
- local r, p = referredpage(n), tonumber(page)
+ local r = referredpage(n)
+ local p = tonumber(page)
if not p then
return 0
elseif p > r then
@@ -1944,7 +2071,9 @@ local function checkedpagestate(n,page)
end
local function setreferencerealpage(actions)
- actions = actions or references.currentset
+ if not actions then
+ actions = references.currentset
+ end
if not actions then
return 0
else
@@ -1976,7 +2105,9 @@ end
-- normally such an analysis happens in the backend code
function references.analyze(actions)
- actions = actions or references.currentset
+ if not actions then
+ actions = references.currentset
+ end
if not actions then
actions = { realpage = 0, pagestate = 0 }
elseif actions.pagestate then
@@ -1995,12 +2126,15 @@ function references.analyze(actions)
end
function commands.referencepagestate(actions)
- actions = actions or references.currentset
+ if not actions then
+ actions = references.currentset
+ end
if not actions then
context(0)
else
if not actions.pagestate then
references.analyze(actions) -- delayed unless explicitly asked for
+-- print("NO STATE",actions.reference,actions.pagestate)
end
context(actions.pagestate)
end
@@ -2019,7 +2153,10 @@ local function realpageofpage(p) -- the last one counts !
nofrealpages = #pages
plist = { }
for rp=1,nofrealpages do
- plist[pages[rp].number] = rp
+ local page = pages[rp]
+ if page then
+ plist[page.number] = rp
+ end
end
references.nofrealpages = nofrealpages
end
@@ -2033,7 +2170,7 @@ function references.checkedrealpage(r)
realpageofpage(r) -- just initialize
end
if not r then
- return texcount.realpageno
+ return texgetcount("realpageno")
elseif r < 1 then
return 1
elseif r > nofrealpages then
@@ -2084,7 +2221,7 @@ runners["special operation with arguments"] = runners["special"]
-- check the validity.
function specials.internal(var,actions)
- local v = references.internals[tonumber(var.operation)]
+ local v = internals[tonumber(var.operation)]
local r = v and v.references.realpage
if r then
actions.realpage = r
@@ -2126,7 +2263,7 @@ end
function specials.deltapage(var,actions)
local p = tonumber(var.operation)
if p then
- p = references.checkedrealpage(p + texcount.realpageno)
+ p = references.checkedrealpage(p + texgetcount("realpageno"))
var.r = p
actions.realpage = actions.realpage or p -- first wins
end
@@ -2146,9 +2283,6 @@ end
-- needs a better split ^^^
-commands.filterreference = references.filter
-commands.filterdefaultreference = references.filterdefault
-
-- done differently now:
function references.export(usedname) end
@@ -2156,3 +2290,23 @@ function references.import(usedname) end
function references.load (usedname) end
commands.exportreferences = references.export
+
+-- better done here .... we don't insert/remove, just use a pointer
+
+local prefixstack = { "" }
+local prefixlevel = 1
+
+function commands.pushreferenceprefix(prefix)
+ prefixlevel = prefixlevel + 1
+ prefixstack[prefixlevel] = prefix
+ context(prefix)
+end
+
+function commands.popreferenceprefix()
+ prefixlevel = prefixlevel - 1
+ if prefixlevel > 0 then
+ context(prefixstack[prefixlevel])
+ else
+ report_references("unable to pop referenceprefix")
+ end
+end
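
As the comment says, the prefix stack is driven by a level pointer rather than by inserting and removing table entries; the returned string is what the TeX side \edefs into \referenceprefix. The push/pop pair as standalone functions (the real pop reports an error on underflow, here we simply recover):

    -- a stack managed with a level pointer; entries above the pointer are ignored
    local prefixstack = { "" }   -- level 1 is the empty (global) prefix
    local prefixlevel = 1

    local function pushprefix(prefix)
        prefixlevel = prefixlevel + 1
        prefixstack[prefixlevel] = prefix
        return prefix
    end

    local function popprefix()
        prefixlevel = prefixlevel - 1
        if prefixlevel > 0 then
            return prefixstack[prefixlevel]
        else
            prefixlevel = 1
            return prefixstack[1]
        end
    end

    print(pushprefix("component-a"))   -- component-a
    print(pushprefix("chapter-2"))     -- chapter-2
    print(popprefix())                 -- component-a
    print(popprefix())                 -- "" (back at the global prefix)
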
diff --git a/Master/texmf-dist/tex/context/base/strc-ref.mkvi b/Master/texmf-dist/tex/context/base/strc-ref.mkvi
index c82a09d20d1..76d79b80221 100644
--- a/Master/texmf-dist/tex/context/base/strc-ref.mkvi
+++ b/Master/texmf-dist/tex/context/base/strc-ref.mkvi
@@ -75,6 +75,8 @@
%D document). By setting the \type{interaction} variable, one
%D can influences the way interactive references are set.
+\let\referenceprefix\empty
+
\installcorenamespace{referencing}
\installdirectcommandhandler \??referencing {referencing} % \??referencing
@@ -191,12 +193,12 @@
\globallet\currentreferencecoding\s!tex
\fi
% beware, the structures.references.set writes a
- % \setnextinternalreference
+ \setnextinternalreference
\strc_references_start_destination_nodes
\ctxcommand{setreferenceattribute("\currentreferencekind", "\referenceprefix","\currentreferencelabels",
{
references = {
- % internal = \nextinternalreference, % no need for an internal as we have an explicit
+ internal = \nextinternalreference,
block = "\currentsectionblock",
section = structures.sections.currentid(),
},
@@ -207,14 +209,14 @@
xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument"\else nil\fi, % only useful when text
\fi
},
- \ifx\currentreferencedata\empty\else
- entries = {
- text = \!!bs\currentreferencedata\!!es
- },
- \fi
- \ifx\currentreferenceuserdata\empty\else
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#userdata}\!!es)
- \fi
+ \ifx\currentreferencedata\empty\else
+ entries = {
+ text = \!!bs\currentreferencedata\!!es
+ },
+ \fi
+ \ifx\currentreferenceuserdata\empty\else
+ userdata = structures.helpers.touserdata(\!!bs\detokenize{#userdata}\!!es)
+ \fi
},"\interactionparameter\c!focus")
}%
\strc_references_stop_destination_nodes
@@ -230,6 +232,7 @@
\dontleavehmode\hbox attr \destinationattribute\lastdestinationattribute\bgroup
\strc_references_flush_destination_nodes
\egroup
+ \prewordbreak % new
\fi}
\def\strc_references_set_page_only_destination_attribute#labels% could in fact be fully expandable
@@ -240,9 +243,11 @@
\lastdestinationattribute\attributeunsetvalue
\else
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","\currentreferencelabels",
+\setnextinternalreference
+ \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","\currentreferencelabels",
{
references = {
+ internal = \nextinternalreference,
block = "\currentsectionblock",
section = structures.sections.currentid(),
},
@@ -258,6 +263,42 @@
\lastdestinationattribute\attributeunsetvalue
\fi}
+\unexpanded\def\strc_references_direct_full#labels#text%
+ {\ifreferencing
+ \strc_references_start_destination_nodes
+\setnextinternalreference
+ \ctxcommand{setreferenceattribute("\s!full", "\referenceprefix","#labels",
+ {
+ references = {
+ internal = \nextinternalreference,
+ block = "\currentsectionblock",
+ section = structures.sections.currentid(),
+ },
+ metadata = {
+ kind = "\s!full",
+ },
+ entries = {
+ text = \!!bs#text\!!es
+ },
+ },"\interactionparameter\c!focus")
+ }%
+ \strc_references_stop_destination_nodes
+ \else
+ \setbox\b_strc_destination_nodes\emptyhbox
+ \lastdestinationattribute\attributeunsetvalue
+ \fi
+ % will become obsolete:
+ \xdef\currentdestinationattribute{\number\lastdestinationattribute}%
+ % will become an option:
+ \ifnum\lastdestinationattribute>\zerocount
+ \dontleavehmode\hbox attr \destinationattribute\lastdestinationattribute\bgroup
+ \strc_references_flush_destination_nodes
+ \egroup
+ \prewordbreak % new
+ \fi}
+
+\let\dodirectfullreference\strc_references_direct_full % for at lua end
+
\def\strc_references_set_page_only_destination_box_attribute#cs#labels%
{\strc_references_set_page_only_destination_attribute{#labels}%
\ifnum\lastdestinationattribute>\zerocount
@@ -771,12 +812,24 @@
\unexpanded\def\setupglobalreferenceprefix[#prefix]%
{\xdef\referenceprefix{#prefix}}
+% \unexpanded\def\pushreferenceprefix#prefix%
+% {\pushmacro\referenceprefix
+% \xdef\referenceprefix{#prefix}} % global
+
+% \unexpanded\def\popreferenceprefix
+% {\popmacro\referenceprefix}
+
+\unexpanded\def\globalpushreferenceprefix#prefix%
+ {\xdef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}}
+
+\unexpanded\def\globalpopreferenceprefix
+ {\xdef\referenceprefix{\ctxcommand{popreferenceprefix()}}}
+
\unexpanded\def\pushreferenceprefix#prefix%
- {\pushmacro\referenceprefix
- \xdef\referenceprefix{#prefix}}
+ {\edef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}}
\unexpanded\def\popreferenceprefix
- {\popmacro\referenceprefix}
+ {\edef\referenceprefix{\ctxcommand{popreferenceprefix()}}}
\def\m_strc_references_prefix_yes{+}
\def\m_strc_references_prefix_nop{-}
@@ -799,7 +852,7 @@
\fi\fi\fi\fi}
\appendtoks
- \setupreferenceprefix[\referencingparameter\c!prefix]
+ \setupreferenceprefix[\referencingparameter\c!prefix]
\to \everysetupreferencing
%D We can typeset a reference using \type{\in}, \type{\at} and
@@ -898,11 +951,12 @@
\begingroup
\let\crlf\space
\let\\\space
- \postponenotes
+ \postponenotes % might go
\referencingparameter\c!left
\doifreferencefoundelse{#label}
{\goto{\limitatetext\currentreferencetitle{\referencingparameter\c!width}\unknown}[#label]}% not so efficient (dup lookup)
{}% todo
+ \flushnotes % might go
\referencingparameter\c!right
\endgroup}
diff --git a/Master/texmf-dist/tex/context/base/strc-reg.lua b/Master/texmf-dist/tex/context/base/strc-reg.lua
index 40cd3455b28..bdb2e0d6713 100644
--- a/Master/texmf-dist/tex/context/base/strc-reg.lua
+++ b/Master/texmf-dist/tex/context/base/strc-reg.lua
@@ -7,51 +7,101 @@ if not modules then modules = { } end modules ['strc-reg'] = {
}
local next, type = next, type
-local texcount = tex.count
local format, gmatch = string.format, string.gmatch
local equal, concat, remove = table.are_equal, table.concat, table.remove
local utfchar = utf.char
local lpegmatch = lpeg.match
local allocate = utilities.storage.allocate
-local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end)
+local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end)
-local report_registers = logs.reporter("structure","registers")
+local report_registers = logs.reporter("structure","registers")
-local structures = structures
-local registers = structures.registers
-local helpers = structures.helpers
-local sections = structures.sections
-local documents = structures.documents
-local pages = structures.pages
-local references = structures.references
+local structures = structures
+local registers = structures.registers
+local helpers = structures.helpers
+local sections = structures.sections
+local documents = structures.documents
+local pages = structures.pages
+local references = structures.references
-local mappings = sorters.mappings
-local entries = sorters.entries
-local replacements = sorters.replacements
+local usedinternals = references.usedinternals
-local processors = typesetters.processors
-local splitprocessor = processors.split
+local mappings = sorters.mappings
+local entries = sorters.entries
+local replacements = sorters.replacements
-local variables = interfaces.variables
-local context = context
+local processors = typesetters.processors
+local splitprocessor = processors.split
-local matchingtilldepth, numberatdepth = sections.matchingtilldepth, sections.numberatdepth
+local texgetcount = tex.getcount
+
+local variables = interfaces.variables
+local v_forward = variables.forward
+local v_all = variables.all
+local v_yes = variables.yes
+local v_current = variables.current
+local v_previous = variables.previous
+local v_text = variables.text
+
+local context = context
+local commands = commands
+
+local matchingtilldepth = sections.matchingtilldepth
+local numberatdepth = sections.numberatdepth
+local currentlevel = sections.currentlevel
+local currentid = sections.currentid
+
+local touserdata = helpers.touserdata
+
+local internalreferences = references.internals
+local setinternalreference = references.setinternalreference
+
+local setmetatableindex = table.setmetatableindex
+local texsetattribute = tex.setattribute
+
+local a_destination = attributes.private('destination')
+
+local absmaxlevel = 5 -- \c_strc_registers_maxlevel
+
+local ctx_startregisteroutput = context.startregisteroutput
+local ctx_stopregisteroutput = context.stopregisteroutput
+local ctx_startregistersection = context.startregistersection
+local ctx_stopregistersection = context.stopregistersection
+local ctx_startregisterentries = context.startregisterentries
+local ctx_stopregisterentries = context.stopregisterentries
+local ctx_startregisterentry = context.startregisterentry
+local ctx_stopregisterentry = context.stopregisterentry
+local ctx_startregisterpages = context.startregisterpages
+local ctx_stopregisterpages = context.stopregisterpages
+local ctx_stopregisterseewords = context.stopregisterseewords
+local ctx_startregisterseewords = context.startregisterseewords
+local ctx_registerentry = context.registerentry
+local ctx_registerseeword = context.registerseeword
+local ctx_registerpagerange = context.registerpagerange
+local ctx_registeronepage = context.registeronepage
-- some day we will share registers and lists (although there are some conceptual
-- differences in the application of keywords)
local function filtercollected(names,criterium,number,collected,prevmode)
- if not criterium or criterium == "" then criterium = variables.all end
- local data = documents.data
- local numbers, depth = data.numbers, data.depth
- local hash, result, nofresult, all, detail = { }, { }, 0, not names or names == "" or names == variables.all, nil
+ if not criterium or criterium == "" then
+ criterium = v_all
+ end
+ local data = documents.data
+ local numbers = data.numbers
+ local depth = data.depth
+ local hash = { }
+ local result = { }
+ local nofresult = 0
+ local all = not names or names == "" or names == v_all
+ local detail = nil
if not all then
for s in gmatch(names,"[^, ]+") do
hash[s] = true
end
end
- if criterium == variables.all or criterium == variables.text then
+ if criterium == v_all or criterium == v_text then
for i=1,#collected do
local v = collected[i]
if all then
@@ -65,10 +115,11 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
end
end
- elseif criterium == variables.current then
+ elseif criterium == v_current then
+ local collectedsections = sections.collected
for i=1,#collected do
local v = collected[i]
- local sectionnumber = sections.collected[v.references.section]
+ local sectionnumber = collectedsections[v.references.section]
if sectionnumber then
local cnumbers = sectionnumber.numbers
if prevmode then
@@ -103,10 +154,11 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
end
end
- elseif criterium == variables.previous then
+ elseif criterium == v_previous then
+ local collectedsections = sections.collected
for i=1,#collected do
local v = collected[i]
- local sectionnumber = sections.collected[v.references.section]
+ local sectionnumber = collectedsections[v.references.section]
if sectionnumber then
local cnumbers = sectionnumber.numbers
if (all or hash[v.metadata.name]) and #cnumbers >= depth then
@@ -136,9 +188,9 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
elseif criterium == variables["local"] then
if sections.autodepth(data.numbers) == 0 then
- return filtercollected(names,variables.all,number,collected,prevmode)
+ return filtercollected(names,v_all,number,collected,prevmode)
else
- return filtercollected(names,variables.current,number,collected,prevmode)
+ return filtercollected(names,v_current,number,collected,prevmode)
end
else -- sectionname, number
-- beware, this works ok for registers
@@ -188,44 +240,77 @@ registers.filtercollected = filtercollected
-- result table; we might do that here as well but since sorting code is
-- older we delay that decision
+-- maybe store the specification in the format (although we predefine only
+-- saved registers)
+
+local function checker(t,k)
+ local v = {
+ metadata = {
+ language = 'en',
+ sorted = false,
+ class = class,
+ },
+ entries = { },
+ }
+ t[k] = v
+ return v
+end
+
local function initializer()
tobesaved = registers.tobesaved
collected = registers.collected
- local internals = references.internals
+ setmetatableindex(tobesaved,checker)
+ setmetatableindex(collected,checker)
+ local usedinternals = references.usedinternals
for name, list in next, collected do
local entries = list.entries
- for e=1,#entries do
- local entry = entries[e]
- local r = entry.references
- if r then
- local internal = r and r.internal
- if internal then
- internals[internal] = entry
+ if not list.metadata.notsaved then
+ for e=1,#entries do
+ local entry = entries[e]
+ local r = entry.references
+ if r then
+ local internal = r and r.internal
+ if internal then
+ internalreferences[internal] = entry
+ usedinternals[internal] = r.used
+ end
end
end
end
end
end
-job.register('structures.registers.collected', tobesaved, initializer)
+local function finalizer()
+ local flaginternals = references.flaginternals
+ for k, v in next, tobesaved do
+ local entries = v.entries
+ if entries then
+ for i=1,#entries do
+ local r = entries[i].references
+ if r and flaginternals[r.internal] then
+ r.used = true
+ end
+ end
+ end
+ end
+end
+
+job.register('structures.registers.collected', tobesaved, initializer, finalizer)
+
+setmetatableindex(tobesaved,checker)
+setmetatableindex(collected,checker)
-local function allocate(class)
+local function defineregister(class,method)
local d = tobesaved[class]
- if not d then
- d = {
- metadata = {
- language = 'en',
- sorted = false,
- class = class
- },
- entries = { },
- }
- tobesaved[class] = d
- end
- return d
+ if method == v_forward then
+ d.metadata.notsaved = true
+ end
end
-registers.define = allocate
+registers.define = defineregister -- 4 times is somewhat over the top but we want consistency
+registers.setmethod = defineregister -- and we might have a difference some day
+commands.defineregister = defineregister
+commands.setregistermethod = defineregister
local entrysplitter = lpeg.tsplitat('+') -- & obsolete in mkiv
@@ -234,7 +319,6 @@ local tagged = { }
local function preprocessentries(rawdata)
local entries = rawdata.entries
if entries then
---~ table.print(rawdata)
local e, k = entries[1] or "", entries[2] or ""
local et, kt, entryproc, pageproc
if type(e) == "table" then
@@ -250,14 +334,15 @@ local function preprocessentries(rawdata)
kt = lpegmatch(entrysplitter,k)
end
entries = { }
- for k=1,#et do
- entries[k] = { et[k] or "", kt[k] or "" }
- end
+ local ok = false
for k=#et,1,-1 do
- if entries[k][1] ~= "" then
- break
- else
+ local etk = et[k]
+ local ktk = kt[k]
+ if not ok and etk == "" then
entries[k] = nil
+ else
+ entries[k] = { etk or "", ktk ~= "" and ktk or nil }
+ ok = true
end
end
rawdata.list = entries
@@ -272,44 +357,94 @@ local function preprocessentries(rawdata)
end
end
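
The rewritten loop in `preprocessentries` walks the split entry from the right: empty trailing components are dropped, components between filled ones are kept, and a sort key is only stored when it is non empty. A runnable illustration of that trimming:

    -- drop empty components from the end of a split register entry,
    -- but keep the ones that are followed by real text
    local function trimentries(et,kt)
        local entries = { }
        local ok = false
        for k=#et,1,-1 do
            local etk = et[k]
            local ktk = kt and kt[k] or ""
            if not ok and etk == "" then
                entries[k] = nil
            else
                entries[k] = { etk or "", ktk ~= "" and ktk or nil }
                ok = true
            end
        end
        return entries
    end

    local list = trimentries({ "texing", "", "" },{ })
    print(#list)          -- 1: the two empty tails are gone
    print(list[1][1])     -- texing

    local mixed = trimentries({ "fonts", "", "serif" },{ })
    print(#mixed)         -- 3: the inner empty component is kept
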
-function registers.store(rawdata) -- metadata, references, entries
- local data = allocate(rawdata.metadata.name).entries
+local function storeregister(rawdata) -- metadata, references, entries
local references = rawdata.references
- references.realpage = references.realpage or 0 -- just to be sure as it can be refered to
+ local metadata = rawdata.metadata
+ -- checking
+ if not metadata.kind then
+ metadata.kind = "entry"
+ end
+ --
+ if not metadata.catcodes then
+ metadata.catcodes = tex.catcodetable -- get
+ end
+ --
+ local name = metadata.name
+ local notsaved = tobesaved[name].metadata.notsaved
+ --
+ local internal = references.internal
+ if not internal then
+ internal = texgetcount("locationcount") -- we assume that it has been set
+ references.internal = internal
+ end
+ --
+ if notsaved then
+ usedinternals[internal] = true -- todo view (we assume that forward references index entries are used)
+ end
+ --
+ if not references.realpage then
+ references.realpage = 0 -- just to be sure as it can be refered to
+ end
+ --
+ local userdata = rawdata.userdata
+ if userdata then
+ rawdata.userdata = touserdata(userdata)
+ end
+ --
+ references.section = currentid()
+ metadata.level = currentlevel()
+ --
+ local data = notsaved and collected[name] or tobesaved[name]
+ local entries = data.entries
+ internalreferences[internal] = rawdata
preprocessentries(rawdata)
- data[#data+1] = rawdata
+ entries[#entries+1] = rawdata
local label = references.label
- if label and label ~= "" then tagged[label] = #data end
- context(#data)
+ if label and label ~= "" then
+ tagged[label] = #entries
+ else
+ references.label = nil
+ end
+ return #entries
end
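
A register defined with the forward method gets `metadata.notsaved`, and `storeregister` then appends its entries to `collected` (usable in the current run) instead of `tobesaved` (which goes to the tuc file for the next run). A compact two-bucket sketch; the flag and bucket names mirror the code above, the rest is illustrative:

    -- two buckets: "tobesaved" goes into the tuc file, "collected" is live this run
    local tobesaved = { }
    local collected = { }

    local function defineregister(class,method)
        tobesaved[class] = tobesaved[class] or { metadata = { }, entries = { } }
        collected[class] = collected[class] or { metadata = { }, entries = { } }
        if method == "forward" then
            tobesaved[class].metadata.notsaved = true
        end
    end

    local function storeregister(class,rawdata)
        local notsaved = tobesaved[class].metadata.notsaved
        local data     = notsaved and collected[class] or tobesaved[class]
        local entries  = data.entries
        entries[#entries+1] = rawdata
        return #entries
    end

    defineregister("index")                  -- normal: saved, needs a second run
    defineregister("livelist","forward")     -- forward: usable immediately

    storeregister("index"   ,{ text = "luatex"  })
    storeregister("livelist",{ text = "metafun" })

    print(#tobesaved["index"].entries)       -- 1
    print(#collected["livelist"].entries)    -- 1
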
-function registers.enhance(name,n)
- local r = tobesaved[name].entries[n]
- if r then
- r.references.realpage = texcount.realpageno
+local function enhanceregister(name,n)
+ local data = tobesaved[name].metadata.notsaved and collected[name] or tobesaved[name]
+ local entry = data.entries[n]
+ if entry then
+ entry.references.realpage = texgetcount("realpageno")
end
end
-function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
+local function extendregister(name,tag,rawdata) -- maybe do lastsection internally
if type(tag) == "string" then
tag = tagged[tag]
end
if tag then
- local r = tobesaved[name].entries[tag]
- if r then
- local rr = r.references
- rr.lastrealpage = texcount.realpageno
- rr.lastsection = sections.currentid()
+ local data = tobesaved[name].metadata.notsaved and collected[name] or tobesaved[name]
+ local entry = data.entries[tag]
+ if entry then
+ local references = entry.references
+ references.lastrealpage = texgetcount("realpageno")
+ references.lastsection = currentid()
if rawdata then
+ local userdata = rawdata.userdata
+ if userdata then
+ rawdata.userdata = touserdata(userdata)
+ end
if rawdata.entries then
preprocessentries(rawdata)
end
- for k,v in next, rawdata do
- if not r[k] then
- r[k] = v
+ local metadata = rawdata.metadata
+ if metadata and not metadata.catcodes then
+ metadata.catcodes = tex.catcodetable -- get
+ end
+ for k, v in next, rawdata do
+ local rk = references[k]
+ if not rk then
+ references[k] = v
else
- local rk = r[k]
- for kk,vv in next, v do
+ for kk, vv in next, v do
if type(vv) == "table" then
if next(vv) then
rk[kk] = vv
@@ -325,6 +460,19 @@ function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
end
end
+registers.store = storeregister
+registers.enhance = enhanceregister
+registers.extend = extendregister
+
+function commands.storeregister(rawdata)
+ local nofentries = storeregister(rawdata)
+ setinternalreference(nil,nil,rawdata.references.internal)
+ context(nofentries)
+end
+
+commands.enhanceregister = enhanceregister
+commands.extendregister = extendregister
+
-- sorting and rendering
local compare = sorters.comparers.basic
@@ -334,7 +482,8 @@ function registers.compare(a,b)
if result ~= 0 then
return result
else
- local ka, kb = a.metadata.kind, b.metadata.kind
+ local ka = a.metadata.kind
+ local kb = b.metadata.kind
if ka == kb then
local page_a, page_b = a.references.realpage, b.references.realpage
if not page_a or not page_b then
@@ -448,17 +597,19 @@ end
function registers.prepare(data)
-- data has 'list' table
- local strip = sorters.strip
+ local strip = sorters.strip
local splitter = sorters.splitters.utf
- local result = data.result
+ local result = data.result
if result then
for i=1, #result do
- local entry, split = result[i], { }
- local list = entry.list
+ local entry = result[i]
+ local split = { }
+ local list = entry.list
if list then
for l=1,#list do
- local ll = list[l]
- local word, key = ll[1], ll[2]
+ local ll = list[l]
+ local word = ll[1]
+ local key = ll[2]
if not key or key == "" then
key = word
end
@@ -473,7 +624,11 @@ function registers.prepare(data)
end
function registers.sort(data,options)
- sorters.sort(data.result,registers.compare)
+ -- if options.pagenumber == false then
+ -- sorters.sort(data.result,compare)
+ -- else
+ sorters.sort(data.result,registers.compare)
+ -- end
end
function registers.unique(data,options)
@@ -482,7 +637,8 @@ function registers.unique(data,options)
for k=1,#dataresult do
local v = dataresult[k]
if prev then
- local pr, vr = prev.references, v.references
+ local vr = v.references
+ local pr = prev.references
if not equal(prev.list,v.list) then
-- ok
elseif pr.realpage ~= vr.realpage then
@@ -525,10 +681,11 @@ function registers.finalize(data,options) -- maps character to index (order)
if trace_registers then
report_registers("splitting at %a",tag)
end
- done, nofdone = { }, 0
+ done = { }
+ nofdone = 0
nofsplit = nofsplit + 1
+ lasttag = tag
split[nofsplit] = { tag = tag, data = done }
- lasttag = tag
end
nofdone = nofdone + 1
done[nofdone] = v
@@ -536,7 +693,7 @@ function registers.finalize(data,options) -- maps character to index (order)
data.result = split
end
-function registers.analyzed(class,options)
+local function analyzeregister(class,options)
local data = collected[class]
if data and data.entries then
options = options or { }
@@ -553,9 +710,21 @@ function registers.analyzed(class,options)
end
end
+registers.analyze = analyzeregister
+
+function commands.analyzeregister(class,options)
+ context(analyzeregister(class,options))
+end
+
+
-- todo take conversion from index
function registers.userdata(index,name)
+ local data = references.internals[tonumber(index)]
+ return data and data.userdata and data.userdata[name] or nil
+end
+
+function commands.registeruserdata(index,name)
local data = references.internals[tonumber(index)]
data = data and data.userdata and data.userdata[name]
if data then
@@ -565,22 +734,26 @@ end
-- todo: ownnumber
+local h_prefixpage = helpers.prefixpage
+local h_prefixlastpage = helpers.prefixlastpage
+local h_title = helpers.title
+
local function pagerange(f_entry,t_entry,is_last,prefixspec,pagespec)
local fer, ter = f_entry.references, t_entry.references
- context.registerpagerange(
+ ctx_registerpagerange(
f_entry.processors and f_entry.processors[2] or "",
fer.internal or 0,
fer.realpage or 0,
function()
- helpers.prefixpage(f_entry,prefixspec,pagespec)
+ h_prefixpage(f_entry,prefixspec,pagespec)
end,
ter.internal or 0,
ter.lastrealpage or ter.realpage or 0,
function()
if is_last then
- helpers.prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys
+ h_prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys
else
- helpers.prefixpage (t_entry,prefixspec,pagespec)
+ h_prefixpage (t_entry,prefixspec,pagespec)
end
end
)
@@ -588,11 +761,11 @@ end
local function pagenumber(entry,prefixspec,pagespec)
local er = entry.references
- context.registeronepage(
+ ctx_registeronepage(
entry.processors and entry.processors[2] or "",
er.internal or 0,
er.realpage or 0,
- function() helpers.prefixpage(entry,prefixspec,pagespec) end
+ function() h_prefixpage(entry,prefixspec,pagespec) end
)
end
@@ -660,17 +833,39 @@ local function collapsepages(pages)
end
function registers.flush(data,options,prefixspec,pagespec)
- local collapse_singles = options.compress == variables.yes
- local collapse_ranges = options.compress == variables.all
+ local collapse_singles = options.compress == v_yes
+ local collapse_ranges = options.compress == v_all
+ local show_page_number = options.pagenumber ~= false -- true or false
local result = data.result
- context.startregisteroutput()
+ local maxlevel = 0
+ --
+ for i=1,#result do
+ local data = result[i].data
+ for d=1,#data do
+ local m = #data[d].list
+ if m > maxlevel then
+ maxlevel = m
+ end
+ end
+ end
+ if maxlevel > absmaxlevel then
+ maxlevel = absmaxlevel
+ report_registers("limiting level to %a",maxlevel)
+ end
+ --
+ ctx_startregisteroutput()
+ local done = { }
+ local started = false
for i=1,#result do
-- ranges need checking !
local sublist = result[i]
- local done = { false, false, false, false }
+ -- local done = { false, false, false, false }
+ for i=1,maxlevel do
+ done[i] = false
+ end
local data = sublist.data
local d, n = 0, 0
- context.startregistersection(sublist.tag)
+ ctx_startregistersection(sublist.tag)
for d=1,#data do
local entry = data[d]
if entry.metadata.kind == "see" then
@@ -683,131 +878,154 @@ function registers.flush(data,options,prefixspec,pagespec)
end
end
end
+ -- ok, this is tricky: we use e[i] delayed so we need it to be local
+ -- but we don't want to allocate too many entries so there we go
while d < #data do
d = d + 1
local entry = data[d]
- local e = { false, false, false, false }
+ local e = { false, false, false }
+ for i=3,maxlevel do
+ e[i] = false
+ end
local metadata = entry.metadata
local kind = metadata.kind
local list = entry.list
- for i=1,4 do -- max 4
+ for i=1,maxlevel do
if list[i] then
e[i] = list[i][1]
end
if e[i] ~= done[i] then
if e[i] and e[i] ~= "" then
done[i] = e[i]
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
+ if started then
+ ctx_stopregisterentry()
+ started = false
+ end
if n == i then
- context.stopregisterentries()
- context.startregisterentries(n)
+-- ctx_stopregisterentries()
+-- ctx_startregisterentries(n)
else
while n > i do
n = n - 1
- context.stopregisterentries()
+ ctx_stopregisterentries()
end
while n < i do
n = n + 1
- context.startregisterentries(n)
+ ctx_startregisterentries(n)
end
end
- local internal = entry.references.internal or 0
- local seeparent = entry.references.seeparent or ""
- local processor = entry.processors and entry.processors[1] or ""
+ local references = entry.references
+ local processors = entry.processors
+ local internal = references.internal or 0
+ local seeparent = references.seeparent or ""
+ local processor = processors and processors[1] or ""
+ -- so, we need to keep e as is (local), or we need local title = e[i] ... which might be
+ -- more of a problem
+ ctx_startregisterentry(0) -- will become a counter
+ started = true
if metadata then
- context.registerentry(processor,internal,seeparent,function() helpers.title(e[i],metadata) end)
+ ctx_registerentry(processor,internal,seeparent,function() h_title(e[i],metadata) end)
else -- ?
- context.registerentry(processor,internal,seeindex,e[i])
+ ctx_registerentry(processor,internal,seeindex,e[i])
end
else
done[i] = false
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
end
end
end
if kind == 'entry' then
- context.startregisterpages()
- if collapse_singles or collapse_ranges then
- -- we collapse ranges and keep existing ranges as they are
- -- so we get prebuilt as well as built ranges
- local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0
- while dd < #data do
- dd = dd + 1
- local next = data[dd]
- if next and next.metadata.kind == "see" then
- dd = dd - 1
- break
- else
- local el, nl = entry.list, next.list
- if not equal(el,nl) then
+ if show_page_number then
+ ctx_startregisterpages()
+ if collapse_singles or collapse_ranges then
+ -- we collapse ranges and keep existing ranges as they are
+ -- so we get prebuilt as well as built ranges
+ local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0
+ while dd < #data do
+ dd = dd + 1
+ local next = data[dd]
+ if next and next.metadata.kind == "see" then
dd = dd - 1
- --~ first = nil
break
- elseif next.references.lastrealpage then
- nofpages = nofpages + 1
- pages[nofpages] = first and { first, last or first } or { entry, entry }
- nofpages = nofpages + 1
- pages[nofpages] = { next, next }
- first, last, prev = nil, nil, nil
- elseif not first then
- first, prev = next, next
- elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ?
- last, prev = next, next
else
- nofpages = nofpages + 1
- pages[nofpages] = { first, last or first }
- first, last, prev = next, nil, next
+ local el, nl = entry.list, next.list
+ if not equal(el,nl) then
+ dd = dd - 1
+ --~ first = nil
+ break
+ elseif next.references.lastrealpage then
+ nofpages = nofpages + 1
+ pages[nofpages] = first and { first, last or first } or { entry, entry }
+ nofpages = nofpages + 1
+ pages[nofpages] = { next, next }
+ first, last, prev = nil, nil, nil
+ elseif not first then
+ first, prev = next, next
+ elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ?
+ last, prev = next, next
+ else
+ nofpages = nofpages + 1
+ pages[nofpages] = { first, last or first }
+ first, last, prev = next, nil, next
+ end
end
end
- end
- if first then
- nofpages = nofpages + 1
- pages[nofpages] = { first, last or first }
- end
- if collapse_ranges and nofpages > 1 then
- nofpages = collapsepages(pages)
- end
- if nofpages > 0 then -- or 0
- d = dd
- for p=1,nofpages do
- local first, last = pages[p][1], pages[p][2]
- if first == last then
- if first.references.lastrealpage then
- pagerange(first,first,true,prefixspec,pagespec)
+ if first then
+ nofpages = nofpages + 1
+ pages[nofpages] = { first, last or first }
+ end
+ if collapse_ranges and nofpages > 1 then
+ nofpages = collapsepages(pages)
+ end
+ if nofpages > 0 then -- or 0
+ d = dd
+ for p=1,nofpages do
+ local first, last = pages[p][1], pages[p][2]
+ if first == last then
+ if first.references.lastrealpage then
+ pagerange(first,first,true,prefixspec,pagespec)
+ else
+ pagenumber(first,prefixspec,pagespec)
+ end
+ elseif last.references.lastrealpage then
+ pagerange(first,last,true,prefixspec,pagespec)
else
- pagenumber(first,prefixspec,pagespec)
+ pagerange(first,last,false,prefixspec,pagespec)
end
- elseif last.references.lastrealpage then
- pagerange(first,last,true,prefixspec,pagespec)
- else
- pagerange(first,last,false,prefixspec,pagespec)
end
- end
- elseif entry.references.lastrealpage then
- pagerange(entry,entry,true,prefixspec,pagespec)
- else
- pagenumber(entry,prefixspec,pagespec)
- end
- else
- while true do
- if entry.references.lastrealpage then
+ elseif entry.references.lastrealpage then
pagerange(entry,entry,true,prefixspec,pagespec)
else
pagenumber(entry,prefixspec,pagespec)
end
- if d == #data then
- break
- else
- d = d + 1
- local next = data[d]
- if next.metadata.kind == "see" or not equal(entry.list,next.list) then
- d = d - 1
+ else
+ while true do
+ if entry.references.lastrealpage then
+ pagerange(entry,entry,true,prefixspec,pagespec)
+ else
+ pagenumber(entry,prefixspec,pagespec)
+ end
+ if d == #data then
break
else
- entry = next
+ d = d + 1
+ local next = data[d]
+ if next.metadata.kind == "see" or not equal(entry.list,next.list) then
+ d = d - 1
+ break
+ else
+ entry = next
+ end
end
end
end
+ ctx_stopregisterpages()
end
- context.stopregisterpages()
elseif kind == 'see' then
local t, nt = { }, 0
while true do
@@ -826,37 +1044,46 @@ function registers.flush(data,options,prefixspec,pagespec)
end
end
end
- context.startregisterseewords()
+ ctx_startregisterseewords()
for i=1,nt do
local entry = t[i]
local seeword = entry.seeword
local seetext = seeword.text or ""
local processor = seeword.processor or (entry.processors and entry.processors[1]) or ""
local seeindex = entry.references.seeindex or ""
- context.registerseeword(i,n,processor,0,seeindex,seetext)
+ ctx_registerseeword(i,n,processor,0,seeindex,seetext)
end
- context.stopregisterseewords()
+ ctx_stopregisterseewords()
end
end
+ if started then
+ ctx_stopregisterentry()
+ started = false
+ end
while n > 0 do
- context.stopregisterentries()
+ ctx_stopregisterentries()
n = n - 1
end
- context.stopregistersection()
+ ctx_stopregistersection()
end
- context.stopregisteroutput()
+ ctx_stopregisteroutput()
-- for now, maybe at some point we will do a multipass or so
data.result = nil
data.metadata.sorted = false
+ -- temp hack for luajittex :
+ local entries = data.entries
+ for i=1,#entries do
+ entries[i].split = nil
+ end
+ -- collectgarbage("collect")
end
-function registers.analyze(class,options)
- context(registers.analyzed(class,options))
-end
-
-function registers.process(class,...)
- if registers.analyzed(class,...) > 0 then
- registers.flush(collected[class],...)
+local function processregister(class,...)
+ if analyzeregister(class,...) > 0 then
+ local data = collected[class]
+ registers.flush(data,...)
end
end
+registers.process = processregister
+commands.processregister = processregister
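
A minimal sketch of how this new entry point is driven (the option values here are illustrative; the keys mirror what strc-reg.mkiv passes below via \ctxcommand{processregister(...)}, and the call assumes a running ConTeXt job in which the register "index" has collected entries):

    -- illustrative call, e.g. from \startluacode ... \stopluacode
    local options = {
        language    = "en",        -- sorting language
        method      = "default",   -- sorter method
        numberorder = "numbers",
        compress    = "yes",       -- "yes" collapses single pages, "all" also collapses ranges
        criterium   = "text",
        pagenumber  = true,        -- false suppresses the page lists altogether
    }
    local prefixspec = { separatorset = "default", conversionset = "default" } -- abbreviated
    local pagespec   = { }                                                     -- likewise abbreviated
    commands.processregister("index", options, prefixspec, pagespec)           -- same function as structures.registers.process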
diff --git a/Master/texmf-dist/tex/context/base/strc-reg.mkiv b/Master/texmf-dist/tex/context/base/strc-reg.mkiv
index 8c9f040f0d6..d072aca69b1 100644
--- a/Master/texmf-dist/tex/context/base/strc-reg.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-reg.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% todo: tag:: becomes rendering
% todo: language, character, linked, location
% todo: fonts etc at sublevels (already defined)
@@ -106,6 +108,14 @@
\c!entries=,
\c!alternative=]
+
+\definemixedcolumns
+ [\v!register]
+ [\c!n=\registerparameter\c!n,
+ \c!balance=\registerparameter\c!balance,
+ \c!align=\registerparameter\c!align,
+ \c!tolerance=\registerparameter\c!tolerance]
+
%D \starttyping
%D \setupregister[index][1][textcolor=darkred]
%D \setupregister[index][2][textcolor=darkgreen,textstyle=bold]
@@ -116,13 +126,15 @@
%D \stoptyping
\newconditional\c_strc_registers_defining
+\setnewconstant\c_strc_registers_maxlevel \plusfive
\ifdefined\Word \else \unexpanded\def\Word#1{#1} \fi
\appendtoks
\ifconditional\c_strc_registers_defining \else % todo: dosingle ...
\settrue\c_strc_registers_defining
- \ctxlua{structures.registers.define('\currentregister')}%
+ \definemixedcolumns[\currentregister][\v!register]% first as otherwise it overloads start/stop
+ \ctxcommand{defineregister("\currentregister","\registerparameter\c!referencemethod")}%
\normalexpanded{\presetheadtext[\currentregister=\Word{\currentregister}]}%
\setuevalue{\currentregister}{\dodoubleempty\strc_registers_insert_entry[\currentregister]}%
\setuevalue{\e!see\currentregister}{\dodoubleempty\strc_registers_insert_see[\currentregister]}%
@@ -131,7 +143,7 @@
\setuevalue{\e!place\currentregister}{\placeregister[\currentregister]}%
\setuevalue{\e!complete\currentregister}{\completeregister[\currentregister]}%
\setuevalue{\e!setup\currentregister\e!endsetup}{\setupregister[\currentregister]}%
- \dorecurse\plusthree {% weird, expanded should not be needed
+ \dorecurse\c_strc_registers_maxlevel{% weird, expanded should not be needed
\normalexpanded{\defineregister[\currentregister:\recurselevel][\currentregister]}%
%\defineregister[\currentregister:\recurselevel][\currentregister]%
\letregisterparameter{\c!entries:\recurselevel}\empty % needed as we use detokenize (ok, we can
@@ -142,6 +154,10 @@
\fi
\to \everydefineregister
+\appendtoks
+ \ctxcommand{setregistermethod("\currentregister","\registerparameter\c!referencemethod")}%
+\to \everysetupregister
+
%D Registering:
\def\strc_registers_register_page_entry
@@ -151,6 +167,52 @@
\expandafter\strc_registers_register_page_entry_indeed
\fi}
+\def\strc_registers_register_page_expand_xml_entries
+ {\xmlstartraw
+ \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
+ \xmlstopraw
+ \globallet\currentregistercoding\s!xml}
+
+\def\strc_registers_register_page_expand_yes_entries
+ {\xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_nop_entries
+ {\xdef\currentregisterentriesa{\detokenizedregisterparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\detokenizedregisterparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\detokenizedregisterparameter{\c!entries:3}}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_xml
+ {\xmlstartraw
+ \xdef\currentregisterentries{\registerparameter\c!entries}%
+ \xmlstopraw
+ \globallet\currentregistercoding\s!xml}
+
+\def\strc_registers_register_page_expand_yes
+ {\xdef\currentregisterentries{\registerparameter\c!entries}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_nop
+ {\xdef\currentregisterentries{\detokenizedregisterparameter\c!entries}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_xml_keys
+ {\xmlstartraw
+ \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
+ \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
+ \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
+ \xmlstopraw}
+
+\def\strc_registers_register_page_expand_yes_keys
+ {\xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
+ \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
+ \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}}
+
\def\strc_registers_register_page_entry_indeed#1#2#3% register data userdata
{\begingroup
\edef\currentregister{#1}%
@@ -164,75 +226,54 @@
\xdef\currentregisterxmlsetup {\registerparameter\c!xmlsetup}%
\ifx\currentregisterentries\empty
\ifx\currentregisterexpansion\s!xml
- \xmlstartraw
- \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
- \xmlstopraw
- \globallet\currentregistercoding\s!xml
+ \strc_registers_register_page_expand_xml_entries
+ \else\ifx\currentregisterexpansion\v!yes
+ \strc_registers_register_page_expand_yes_entries
\else
- \ifx\currentregisterexpansion\v!yes
- \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
- \else
- \xdef\currentregisterentriesa{\detokenizedregisterparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\detokenizedregisterparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\detokenizedregisterparameter{\c!entries:3}}%
- \fi
- \globallet\currentregistercoding\s!tex
- \fi
+ \strc_registers_register_page_expand_nop_entries
+ \fi\fi
\else
\ifx\currentregisterexpansion\s!xml
- \xmlstartraw
- \xdef\currentregisterentries{\registerparameter\c!entries}%
- \xmlstopraw
- \globallet\currentregistercoding\s!xml
+ \strc_registers_register_page_expand_xml
+ \else\ifx\currentregisterexpansion\v!yes
+ \strc_registers_register_page_expand_yes
\else
- \ifx\currentregisterexpansion\v!yes
- \xdef\currentregisterentries{\registerparameter\c!entries}%
- \else
- \xdef\currentregisterentries{\detokenizedregisterparameter\c!entries}%
- \fi
- \globallet\currentregistercoding\s!tex
- \fi
+ \strc_registers_register_page_expand_nop
+ \fi\fi
\fi
\ifx\currentregisterkeys\empty
\ifx\currentregistercoding\s!xml
- \xmlstartraw
- \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
- \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
- \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
- \xmlstopraw
+ \strc_registers_register_page_expand_xml_keys
\else
- \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
- \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
- \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
+ \strc_registers_register_page_expand_yes_keys
\fi
\fi
\setnextinternalreference
% we could consider storing register entries in a list which we
% could then sort
- \xdef\currentregisternumber{\ctxlua{
- structures.registers.store { % 'own' should not be in metadata
+ \xdef\currentregisternumber{\ctxcommand{storeregister{ % 'own' should not be in metadata
metadata = {
- kind = "entry",
+ % kind = "entry",
name = "\currentregister",
- level = structures.sections.currentlevel(),
+ % level = structures.sections.currentlevel(),
coding = "\currentregistercoding",
- catcodes = \the\catcodetable,
+ % catcodes = \the\catcodetable,
\ifx\currentregisterownnumber\v!yes
own = "\registerparameter\c!alternative", % can be used instead of pagenumber
\fi
- xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument" \else nil \fi, % only useful when text
+ \ifx\currentreferencecoding\s!xml
+ xmlroot = "\xmldocument", % only useful when text
+ \fi
\ifx\currentregisterxmlsetup\empty \else
xmlsetup = "\currentregisterxmlsetup",
\fi
},
references = {
- internal = \nextinternalreference,
- section = structures.sections.currentid(), % hm, why then not also lastsection the same way
+ % internal = \nextinternalreference,
+ % section = structures.sections.currentid(), % hm, why then not also lastsection the same way
+ \ifx\currentregisterlabel\empty \else
label = "\currentregisterlabel",
+ \fi
},
% \ifx\currentregisterentries\empty \else
entries = {
@@ -252,11 +293,11 @@
userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
}
}}%
- \ctxlua{structures.references.setinternalreference(nil,nil,\nextinternalreference)}%
+ % \ctxcommand{setinternalreference(nil,nil,\nextinternalreference)}% in previous
\ifx\currentregisterownnumber\v!yes
\glet\currentregistersynchronize\relax
\else
- \xdef\currentregistersynchronize{\ctxlatelua{structures.registers.enhance("\currentregister",\currentregisternumber)}}%
+ \xdef\currentregistersynchronize{\ctxlatecommand{enhanceregister("\currentregister",\currentregisternumber)}}%
\fi
\currentregistersynchronize % here?
% needs thinking ... bla\index{bla}. will break before the . but adding a
@@ -295,7 +336,7 @@
\fi}
\def\strc_registers_stop_entry[#1][#2]%
- {\normalexpanded{\ctxlatelua{structures.registers.extend("#1","#2")}}}
+ {\normalexpanded{\ctxlatecommand{extendregister("#1","#2")}}}
\def\setregisterentry {\dotripleempty\strc_registers_set_entry}
\def\finishregisterentry{\dotripleempty\strc_registers_finish_entry}
@@ -328,19 +369,19 @@
\fi
% I hate this kind of mess ... but it's a user request.
\ifx\currentregisterentries\empty
- \normalexpanded{\ctxlua{structures.registers.extend("\currentregister","\currentregisterlabel", {
+ \normalexpanded{\ctxcommand{extendregister("\currentregister","\currentregisterlabel", {
metadata = {
\ifx\currentregisterownnumber\v!yes
own = "\registerparameter\c!alternative", % can be used instead of pagenumber
\fi
},
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
+ userdata = \!!bs\detokenize{#3}\!!es
})%
}}%
\else
- \normalexpanded{\ctxlua{structures.registers.extend("\currentregister","\currentregisterlabel", {
+ \normalexpanded{\ctxcommand{extendregister("\currentregister","\currentregisterlabel", {
metadata = {
- catcodes = \the\catcodetable,
+ % catcodes = \the\catcodetable,
coding = "\currentregistercoding",
\ifx\currentregisterownnumber\v!yes
own = "\registerparameter\c!alternative", % can be used instead of pagenumber
@@ -351,7 +392,7 @@
\!!bs\currentregisterentries\!!es,
\!!bs\currentregisterkeys\!!es
},
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
+ userdata = \!!bs\detokenize{#3}\!!es
})
}}%
\fi
@@ -373,7 +414,7 @@
% \placeregister[index][n=1]
% \stoptext
-% some overlap wit previous
+% some overlap with previous
\unexpanded\def\setstructurepageregister
{\dotripleempty\strc_registers_set}
@@ -420,16 +461,16 @@
\fi
\setnextinternalreference
% we could consider storing register entries in list
- \edef\temp{\ctxlua{ structures.registers.store {
+ \edef\temp{\ctxcommand{storeregister{
metadata = {
kind = "see",
name = "\currentregister",
- level = structures.sections.currentlevel(),
- catcodes = \the\catcodetable,
+ % level = structures.sections.currentlevel(),
+ % catcodes = \the\catcodetable,
},
references = {
- internal = \nextinternalreference,
- section = structures.sections.currentid(),
+ % internal = \nextinternalreference,
+ % section = structures.sections.currentid(),
},
entries = {
% we need a special one for xml, this is just a single one
@@ -456,12 +497,13 @@
{\begingroup
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
- \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\ctxlua{structures.registers.analyze('\currentregister',{
+ \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\ctxcommand{analyzeregister('\currentregister',{
language = "\registerparameter\s!language",
method = "\registerparameter\c!method",
numberorder = "\registerparameter\c!numberorder",
compress = "\registerparameter\c!compress",
criterium = "\registerparameter\c!criterium",
+ pagenumber = \ifx\registerpageseparatorsymbol\empty false\else true\fi,
})}}}% brrr
\ifcase\utilityregisterlength\relax
\resetsystemmode\v!register
@@ -478,6 +520,27 @@
\unexpanded\def\placeregister
{\dodoubleempty\strc_registers_place}
+% \def\strc_registers_place[#1][#2]%
+% {\iffirstargument
+% \begingroup
+% %\forgetall
+% \edef\currentregister{#1}%
+% \setupregister[\currentregister][#2]%
+% \the\everyplaceregister
+% \ifnum\registerparameter\c!n>\plusone
+% \startcolumns
+% [\c!n=\registerparameter\c!n,
+% \c!balance=\registerparameter\c!balance,
+% \c!align=\registerparameter\c!align,
+% \c!tolerance=\registerparameter\c!tolerance]%
+% \strc_registers_place_indeed
+% \stopcolumns
+% \else
+% \strc_registers_place_indeed
+% \fi
+% \endgroup
+% \fi}
+
\def\strc_registers_place[#1][#2]%
{\iffirstargument
\begingroup
@@ -485,43 +548,36 @@
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
\the\everyplaceregister
- \ifnum\registerparameter\c!n>\plusone
- \startcolumns
- [\c!n=\registerparameter\c!n,
- \c!balance=\registerparameter\c!balance,
- \c!align=\registerparameter\c!align,
- \c!tolerance=\registerparameter\c!tolerance]%
- \strc_registers_place_indeed
- \stopcolumns
+ \ifnum\namedmixedcolumnsparameter\currentregister\c!n>\plusone
+ \startmixedcolumns[\currentregister]
+ \strc_registers_place_indeed
+ \stopmixedcolumns
\else
\strc_registers_place_indeed
\fi
\endgroup
\fi}
-\def\strc_registers_place_columns
- {\startcolumns
- [\c!n=\registerparameter\c!n,
- \c!balance=\registerparameter\c!balance,
- \c!align=\registerparameter\c!align,
- \c!tolerance=\registerparameter\c!tolerance]%
- \startpacked[\v!blank]%
- \strc_registers_place_indeed
- \stoppacked
- \stopcolumns}
-
-\def\strc_registers_place_normal
- {\startpacked[\v!blank]%
- \strc_registers_place_indeed
- \stoppacked}
+% \def\strc_registers_place_columns
+% {\startmixedcolumns[\currentregister]
+% \startpacked[\v!blank]%
+% \strc_registers_place_indeed
+% \stoppacked
+% \stopmixedcolumns}
+%
+% \def\strc_registers_place_normal
+% {\startpacked[\v!blank]%
+% \strc_registers_place_indeed
+% \stoppacked}
\def\strc_registers_place_indeed
- {\ctxlua{structures.registers.process('\currentregister',{
+ {\ctxcommand{processregister('\currentregister',{
language = "\registerparameter\s!language",
method = "\registerparameter\c!method",
numberorder = "\registerparameter\c!numberorder",
compress = "\registerparameter\c!compress",
criterium = "\registerparameter\c!criterium",
+ pagenumber = \ifx\registerpageseparatorsymbol\empty false\else true\fi,
},{
separatorset = "\registerparameter\c!pageprefixseparatorset",
conversionset = "\registerparameter\c!pageprefixconversionset",
@@ -672,16 +728,35 @@
\dostoptagged
\endgroup}
+% \unexpanded\def\startregisterentries#1% depth
+% {\endgraf
+% \begingroup
+% \dostarttagged\t!registerentries\empty
+% \let\savedcurrentregister\currentregister
+% \edef\currentregister{\currentregister:#1}%
+% \useregisterstyleandcolor\c!textstyle\c!textcolor
+% \advance\leftskip\numexpr#1-\plusone\relax\dimexpr\d_strc_registers_distance\relax
+% \hangindent\registerparameter\c!distance\relax
+% \hangafter\plusone
+% \let\currentregister\savedcurrentregister}
+
+\newdimen\d_strc_registers_hangindent
+\newcount\c_strc_registers_hangafter
+
\unexpanded\def\startregisterentries#1% depth
{\endgraf
\begingroup
+ \scratchcounter\ifnum#1>\c_strc_registers_maxlevel\c_strc_registers_maxlevel\else#1\fi\relax
\dostarttagged\t!registerentries\empty
\let\savedcurrentregister\currentregister
- \edef\currentregister{\currentregister:#1}%
+ \edef\currentregister{\currentregister:\number\scratchcounter}%
\useregisterstyleandcolor\c!textstyle\c!textcolor
- \advance\leftskip\numexpr#1-\plusone\relax\dimexpr\d_strc_registers_distance\relax
- \hangindent\registerparameter\c!distance\relax
- \hangafter\plusone
+ \ifnum\scratchcounter>\plusone
+ \advance\leftskip\d_strc_registers_distance\relax
+ \fi
+ \d_strc_registers_hangindent\registerparameter\c!distance\relax
+ \c_strc_registers_hangafter \plusone
+\blank[\v!samepage]%
\let\currentregister\savedcurrentregister}
\unexpanded\def\stopregisterentries
@@ -689,6 +764,15 @@
\dostoptagged
\endgroup}
+\unexpanded\def\startregisterentry#1% todo: level
+ {\begingroup
+ \hangindent\d_strc_registers_hangindent
+ \hangafter \c_strc_registers_hangafter}
+
+\unexpanded\def\stopregisterentry
+ {\endgraf
+ \endgroup}
+
\unexpanded\def\startregistersection#1% title
{\dostarttagged\t!registersection\empty
\dostarttagged\t!registertag\empty
@@ -729,7 +813,7 @@
\fi}
\unexpanded\def\registeronepagerangeseparator
- {|\endash|}
+ {|\endash|} % todo use \prewordbreak
\def\withregisterpagecommand#1#2#3#4%
{\def\currentregisterpageindex{#2}%
@@ -757,17 +841,12 @@
\dostoptagged
\dostoptagged}
-\unexpanded\def\doapplyregisterentrycommand#1#2% processor text
- {\dostarttagged\t!registerentry\empty
- \ifx\currentregisterseeindex\empty \else
- \dontleavehmode
- \dosetdirectpagereference{seeindex:\currentregisterseeindex}% maybe some day we will support an area
- \fi
- \applyprocessor{#1}{\registerparameter\c!textcommand{\limitedregisterentry{\registerparameter\c!deeptextcommand{#2}}}}%
- \dostoptagged}
+\let\strc_register_injector_process\relax
+\let\strc_register_injector_show \relax
\unexpanded\def\defaultregisterentry#1#2#3#4% #1:processor #2:internal #3:seeindex #4:word
{\def\currentregisterpageindex{#2}%
+ \strc_register_injector_process
\iflocation
\def\currentregisterseeindex{#3}%
\doifelse{\registerparameter\c!interaction}\v!text
@@ -778,6 +857,16 @@
\doapplyregisterentrycommand{#1}{#4}%
\fi}
+\unexpanded\def\doapplyregisterentrycommand#1#2% processor text
+ {\dostarttagged\t!registerentry\empty
+ \ifx\currentregisterseeindex\empty \else
+ \dontleavehmode
+ \strc_register_injector_show
+ \dosetdirectpagereference{seeindex:\currentregisterseeindex}% maybe some day we will support an area
+ \fi
+ \applyprocessor{#1}{\registerparameter\c!textcommand{\limitedregisterentry{\registerparameter\c!deeptextcommand{#2}}}}%
+ \dostoptagged}
+
\unexpanded\def\doapplyregisterseecommand#1#2%
{\ifx\currentregisterseeindex\empty
% \dontleavehmode
@@ -825,7 +914,7 @@
% \placeregister[index][n=1,pagecommand=\MyRegisterPageCommand]
% \stoptext
-\def\registerpageuserdata #1#2{\ctxlua{structures.registers.userdata(#1,"#2")}}
+\def\registerpageuserdata #1#2{\ctxcommand{registeruserdata(#1,"#2")}}
\def\currentregisterpageuserdata {\registerpageuserdata\currentregisterpageindex} % {#1}
% not yet ok : new internal handler names
@@ -836,10 +925,10 @@
\installcorenamespace{registersymbol}
\setvalue{\??registersymbol n}%
- {\def\registerpageseparatorsymbol{, }}
+ {\def\registerpageseparatorsymbol{,\space}}
\setvalue{\??registersymbol a}%
- {\def\registerpageseparatorsymbol{, }} % now done via conversion
+ {\def\registerpageseparatorsymbol{,\space}} % now done via conversion
\setvalue{\??registersymbol\v!none}%
{\let\registerpageseparatorsymbol\empty
@@ -883,4 +972,6 @@
[\v!index]
% [\v!indices]
+\stopcontextdefinitioncode
+
\protect \endinput
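
Throughout the MkIV file above, direct \ctxlua{structures....} calls have been replaced by \ctxcommand{...}, which evaluates its argument in the Lua "commands" namespace. A hedged sketch of that bridge with a made-up command (demoregister is hypothetical, not part of this patch):

    -- hypothetical example of the \ctxcommand bridge used throughout this patch;
    -- \ctxcommand{demoregister("index",3)} ends up as the Lua call below, and
    -- whatever goes through context() is injected back into the TeX input stream
    function commands.demoregister(name,n)
        context("%s has %s entries",name,n) -- context() accepts a format string plus arguments
    end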
diff --git a/Master/texmf-dist/tex/context/base/strc-ren.mkiv b/Master/texmf-dist/tex/context/base/strc-ren.mkiv
index 00c8c3cd4d5..fdf8fb7f4a4 100644
--- a/Master/texmf-dist/tex/context/base/strc-ren.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-ren.mkiv
@@ -26,7 +26,7 @@
% \def\doTitle#1#2{\ruledvbox{\forgetall \hsize=4cm \ruledhbox{\ruledvtop{#1}\ruledvtop{#2}}}}
% \section{test test test test test test test test test test test test test test test test test}
-\newtoks\everyheadstart
+% \newtoks\everyheadstart % not used currently
\unexpanded\def\strc_rendering_initialize_style_and_color
{\ifconditional\headisdisplay
@@ -625,10 +625,16 @@
\dontleavehmode % in case there is no strut, else side effects with llap
\ifconditional\headshownumber
\llap {
+ \signalrightpage
\hbox {
\hfill
\headnumbercontent
- \hskip\dimexpr\d_strc_rendering_local_leftoffset+\doifoddpageelse\leftmargindistance\rightmargindistance\relax
+ \doifrightpageelse{
+ \scratchdistance\leftmargindistance
+ } {
+ \scratchdistance\rightmargindistance
+ }
+ \hskip\dimexpr\d_strc_rendering_local_leftoffset+\scratchdistance\relax
}
}
\fi
diff --git a/Master/texmf-dist/tex/context/base/strc-rsc.lua b/Master/texmf-dist/tex/context/base/strc-rsc.lua
index a90f577e30c..e2105a4efc1 100644
--- a/Master/texmf-dist/tex/context/base/strc-rsc.lua
+++ b/Master/texmf-dist/tex/context/base/strc-rsc.lua
@@ -67,11 +67,11 @@ local function splitreference(str)
local t = lpegmatch(referencesplitter,str)
if t then
local a = t.arguments
- if a and find(a,"\\") then
+ if a and find(a,"\\",1,true) then
t.has_tex = true
else
local o = t.arguments
- if o and find(o,"\\") then
+ if o and find(o,"\\",1,true) then
t.has_tex = true
end
end
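
The extra "1,true" arguments above switch find to a plain (literal) substring search. A small standalone check of the behaviour (plain Lua, nothing ConTeXt specific):

    -- string.find(s,needle,init,plain): with plain = true the needle is matched as a
    -- literal substring, bypassing the pattern matcher; a backslash is not a magic
    -- pattern character anyway, so here this is essentially a cheap optimization on
    -- the reference-splitting path
    local s = [[see \in{section}[whatever] for details]]
    print(string.find(s, "\\", 1, true))   -- 5  5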
diff --git a/Master/texmf-dist/tex/context/base/strc-sbe.mkiv b/Master/texmf-dist/tex/context/base/strc-sbe.mkiv
index 4ea08b30c2b..fc48307ec06 100644
--- a/Master/texmf-dist/tex/context/base/strc-sbe.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-sbe.mkiv
@@ -76,17 +76,24 @@
\to \everyaftersectionblock
\unexpanded\def\setsectionblock
- {\dosingleargument\strc_sectionblock_set}
+ {\dodoubleempty\strc_sectionblock_set}
-\def\strc_sectionblock_set[#1]% used to set the default
- {\edef\currentsectionblock{\ctxcommand{setsectionblock("#1")}}}
+\def\strc_sectionblock_set[#1][#2]% used to set the default
+ {\edef\currentsectionblock{#1}% from now on we assume a value
+ \setupcurrentsectionblock[#2]%
+ \ctxcommand{setsectionblock("#1", { bookmark = "\sectionblockparameter\c!bookmark" })}}
\let\currentsectionblock\empty % was \s!unknown
-\unexpanded\def\startsectionblock[#1]%
+\unexpanded\def\startsectionblock
+ {\dodoubleempty\strc_sectionblock_start}
+
+\unexpanded\def\strc_sectionblock_start[#1][#2]%
{%\ctxlua{structures.counters.check(0)}% we assume sane usage of \page, as this is the only workable place (in push)
\begingroup
- \edef\currentsectionblock{\ctxcommand{pushsectionblock("#1")}}%
+ \edef\currentsectionblock{#1}% from now on we assume a value
+ \setupcurrentsectionblock[#2]%
+ \ctxcommand{pushsectionblock("#1", { bookmark = "\sectionblockparameter\c!bookmark" })}%
\csname #1true\endcsname % obsolete
\setsystemmode\currentsectionblock
\the\everybeforesectionblock\relax
@@ -95,7 +102,7 @@
\unexpanded\def\stopsectionblock
{\showmessage\m!structures2\currentsectionblock
\the\everyaftersectionblock\relax
- \edef\currentsectionblock{\ctxcommand{popsectionblock()}}%
+ \ctxcommand{popsectionblock()}%
\endgroup}
%D \starttyping
diff --git a/Master/texmf-dist/tex/context/base/strc-sec.mkiv b/Master/texmf-dist/tex/context/base/strc-sec.mkiv
index a5ff2084e5a..122892104c1 100644
--- a/Master/texmf-dist/tex/context/base/strc-sec.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-sec.mkiv
@@ -15,6 +15,8 @@
\unprotect
+\startcontextdefinitioncode
+
\installcorenamespace{structure}
\installdirectcommandhandler \??structure {structure} % unchecked, so we need to initialize used parameters
@@ -88,6 +90,27 @@
\def\namedstructureheadlocation#1% expandable, maybe [#1]
{\csname\??savedinternalreference\ifcsname\??savedinternalreference#1\endcsname#1\else\s!default\fi\endcsname}
+% The next directive only makes sense when we have a sort of guaranteed outcome (math is not so
+% nice for instance).
+%
+% \enabledirectives[references.bookmarks.preroll]
+
+\newconditional\c_strc_bookmarks_preroll
+
+\installtexdirective
+ {references.bookmarks.preroll}
+ {\settrue \c_strc_bookmarks_preroll}
+ {\setfalse\c_strc_bookmarks_preroll}
+
+\def\strc_sectioning_autobookmark#1%
+ {\begingroup
+ \the\everypreroll
+ \nodestostring\tempstring{#1}%
+ \globallet\currentstructurebookmark\tempstring
+ \endgroup}
+
+% so it's an experiment
+
\unexpanded\def\strc_sectioning_register#1#2#3% #1=interfaced-settings, #2=optional user data (not yet supported)
{\begingroup
\setupstructure[\c!name={#1},#2]%
@@ -112,6 +135,9 @@
\xdef\currentstructuremarking {\structureparameter\c!marking}%
\xdef\currentstructurelist {\structureparameter\c!list}%
\xmlstopraw
+ \iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark\currentstructuretitle
+ \fi \fi \fi
\ifx\currentstructurelist\empty
\globallet\currentstructurelist\currentstructuretitle
\fi
@@ -122,16 +148,23 @@
\xdef\currentstructurebookmark{\structureparameter\c!bookmark}%
\xdef\currentstructuremarking {\structureparameter\c!marking}%
\xdef\currentstructurelist {\structureparameter\c!list}%
+ \iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark\currentstructuretitle
+ \fi \fi \fi
\else
\xdef\currentstructuretitle {\detokenizedstructureparameter\c!title}%
\xdef\currentstructurebookmark{\detokenizedstructureparameter\c!bookmark}%
\xdef\currentstructuremarking {\detokenizedstructureparameter\c!marking}%
\xdef\currentstructurelist {\detokenizedstructureparameter\c!list}%
\iflocation \ifx\currentstructurebookmark\empty
- \begingroup
- \simplifycommands
- \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
- \endgroup
+ \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark{\structureparameter\c!title}%
+ \else
+ \begingroup
+ \simplifycommands
+ \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
+ \endgroup
+ \fi
\fi \fi
\fi
\ifx\currentstructurelist\empty
@@ -142,8 +175,8 @@
\setnextinternalreference
\storeinternalreference\currentstructurename\nextinternalreference %
\strc_sectioning_set_reference_prefix
- \xdef\currentstructurenumber{\ctxlua{ % todo: combine with next call, adapt marks accordingly
- structures.sections.somelevel {
+ \ctxcommand{% todo: combine with next call, adapt marks accordingly
+ setsectionentry{
references = {
internal = \nextinternalreference,
block = "\currentsectionblock",
@@ -190,7 +223,9 @@
numberdata = {
% needed ?
block = "\currentsectionblock",
- hidenumber = \ifx\currentstructureshownumber\v!no true\else nil\fi, % titles
+ \ifx\currentstructureshownumber\v!no
+ hidenumber = true, % titles
+ \fi
% so far
separatorset = "\structureparameter\c!sectionseparatorset",
conversion = "\structureparameter\c!sectionconversion", % for good old times sake
@@ -203,14 +238,12 @@
},
userdata = \!!bs\detokenize{#3}\!!es % will be converted to table at the lua end
}
- }}%
- % \xdef\currentstructurelistnumber{\ctxcommand{addtolist(structures.sections.current())}}%
+ }%
\xdef\currentstructurelistnumber{\ctxcommand{currentsectiontolist()}}%
% \currentstructuresynchronize has to be called someplace, since it introduces a node
\setstructuresynchronization\currentstructurelistnumber
\endgroup}
-\let\currentstructurenumber \!!zerocount
\let\currentsectioncountervalue \!!zerocount % redefined later
\let\previoussectioncountervalue\!!zerocount % redefined later
@@ -272,14 +305,14 @@
\newconditional\c_strc_rendering_continuous % not used (mkii ?)
-\def\setstructurelevel #1#2{\ctxlua{structures.sections.setlevel("#1","#2")}} % name, level|parent
-\def\getstructurelevel #1{\ctxlua{structures.sections.getcurrentlevel("#1")}}% name
-\def\setstructurenumber #1#2{\ctxlua{structures.sections.setnumber(#1,"#2")}} % level, number (+/-)
-\def\getstructurenumber #1{\ctxlua{structures.sections.getnumber(#1)}} % level
-\def\getsomestructurenumber #1#2{\ctxlua{structures.sections.getnumber(#1,"#2")}} % level, what
-\def\getfullstructurenumber #1{\ctxlua{structures.sections.fullnumber(#1)}} % level
-\def\getsomefullstructurenumber#1#2{\ctxlua{structures.sections.fullnumber(#1,"#2")}}
-\def\getspecificstructuretitle #1{\ctxlua{structures.sections.structuredata("#1","titledata.title",nil,"\headparameter\s!catcodes")}}%
+\def\setstructurelevel #1#2{\ctxcommand{setsectionlevel("#1","#2")}} % name, level|parent
+\def\getstructurelevel #1{\ctxcommand{getcurrentsectionlevel("#1")}}% name
+\def\setstructurenumber #1#2{\ctxcommand{setsectionnumber(#1,"#2")}} % level, number (+/-)
+\def\getstructurenumber #1{\ctxcommand{getsectionnumber(#1)}} % level
+\def\getsomestructurenumber #1#2{\ctxcommand{getsectionnumber(#1,"#2")}} % level, what
+\def\getfullstructurenumber #1{\ctxcommand{getfullsectionnumber(#1)}} % level
+\def\getsomefullstructurenumber#1#2{\ctxcommand{getfullsectionnumber(#1,"#2")}}
+\def\getspecificstructuretitle #1{\ctxcommand{getstructuredata("#1","titledata.title",nil,"\headparameter\s!catcodes")}}%
% will be:
%
@@ -407,7 +440,7 @@
\edef\currentsectionheadcoupling{\sectionheadcoupling\currenthead}%
\edef\currentsectionheadsection {\sectionheadsection \currentsectionheadcoupling}%
\edef\currentsectionlevel {\sectionlevel \currentsectionheadsection}%
- \ctxlua{structures.sections.register("\currenthead",{
+ \ctxcommand{registersection("\currenthead",{
coupling = "\currentsectionheadcoupling",
section = "\currentsectionheadsection",
level = \currentsectionlevel,
@@ -432,6 +465,9 @@
\unexpanded\def\startnamedsection
{\dotripleempty\strc_sectioning_start_named_section}
+% todo: add grouping but where: before/after trickery .. probably inside because one can always add
+% grouping to the before/after settings
+
\unexpanded\def\strc_sectioning_start_named_section[#1]% [#2][#3]
{\pushmacro\currentnamedsection
\edef\currentnamedsection{#1}%
@@ -547,8 +583,8 @@
% head -> head
-\def\sectionheadmarkingtitle #1#2{\ctxlua{structures.marks.title("#1","#2")}}
-\def\sectionheadmarkingnumber#1#2{\ctxlua{structures.marks.number("#1","#2")}}
+\def\sectionheadmarkingtitle #1#2{\ctxcommand{markingtitle("#1","#2")}}
+\def\sectionheadmarkingnumber#1#2{\ctxcommand{markingnumber("#1","#2")}}
\def\sectionheadcoupling#1{\namedheadparameter{#1}\c!coupling}
\def\sectionheadsection #1{\namedheadparameter{#1}\c!section}
@@ -588,11 +624,13 @@
\headparameter\c!beforesection % beware, no users vars set yet
\the\everybeforehead
\strc_sectioning_handle{#1}{#2}{#3}% name -- -- -- userdata (we might move the tagged to here)
+ % potential: \bgroup (can be optional: grouped = yes)
\headparameter\c!insidesection}
\unexpanded\def\strc_sectioning_stop[#1]% !!! also used at lua end
{\dostoptagged
\dostoptagged
+ % potential: \egroup
%\globalpopmacro\currenthead % so we do a hard recover
\xdef\currenthead{#1}% recover
\headparameter\c!aftersection
@@ -681,8 +719,15 @@
\setfalse\headshownumber
\fi}
+\newtoks\everyheadsynchronization
+
+\appendtoks
+ \currentstructuresynchronize
+\to \everyheadsynchronization
+
\unexpanded\def\theheadsynchonization
- {\currentstructuresynchronize}
+ {\the\everyheadsynchronization
+ \currentstructuresynchronize}
% BEWARE: \marking[section]{my text} does not work as we use list indices instead
% so we need a 'keep track of raw set option' (or maybe a funny internal prefix)
@@ -723,7 +768,7 @@
\unexpanded\def\placeheadtext {\dosingleempty\strc_sectioning_place_head_text } % use with care
\unexpanded\def\placeheadnumber{\dosingleempty\strc_sectioning_place_head_number} % use with care
-\unexpanded\def\strc_sectioning_report{\ctxlua{structures.sections.reportstructure()}}
+\unexpanded\def\strc_sectioning_report{\ctxcommand{reportstructure()}}
\ifdefined\strc_rendering_initialize_style_and_color \else
@@ -999,8 +1044,8 @@
#1%
\fi}
-\def\currentsectioncountervalue {\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead)}}
-\def\previoussectioncountervalue{\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead-1)}}
+\def\currentsectioncountervalue {\ctxcommand{depthnumber(\thenamedheadlevel\currenthead)}}
+\def\previoussectioncountervalue{\ctxcommand{depthnumber(\thenamedheadlevel\currenthead-1)}}
\def\strc_sectioning_handle_page_nop
{\edef\p_continue{\headparameter\c!continue}%
@@ -1079,7 +1124,7 @@
\let\sectioncountervalue\structurevalue
-\def\currentheadtext{obsolete, use marks}
+\def\currentheadtext{obsolete,\space use marks}
% list references, will be redone in lua when we need it
@@ -1114,4 +1159,6 @@
\finalizeautostructurelevels
\to \everystoptext
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/strc-syn.lua b/Master/texmf-dist/tex/context/base/strc-syn.lua
index ca4b3ac1898..604365b2d0c 100644
--- a/Master/texmf-dist/tex/context/base/strc-syn.lua
+++ b/Master/texmf-dist/tex/context/base/strc-syn.lua
@@ -12,6 +12,9 @@ local allocate = utilities.storage.allocate
-- interface to tex end
+local context = context
+local sorters = sorters
+
local structures = structures
local synonyms = structures.synonyms
local tags = structures.tags
@@ -19,6 +22,10 @@ local tags = structures.tags
local collected = allocate()
local tobesaved = allocate()
+local firstofsplit = sorters.firstofsplit
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
synonyms.collected = collected
synonyms.tobesaved = tobesaved
@@ -114,8 +121,6 @@ function synonyms.filter(data,options)
end
function synonyms.prepare(data)
- local strip = sorters.strip
- local splitter = sorters.splitters.utf
local result = data.result
if result then
for i=1, #result do
@@ -123,7 +128,7 @@ function synonyms.prepare(data)
local rd = r.definition
if rd then
local rt = rd.tag
- local sortkey = (rt and rt ~= "" and rt) or rd.synonym
+ local sortkey = rt and rt ~= "" and rt or rd.synonym
r.split = splitter(strip(sortkey))
end
end
@@ -140,13 +145,17 @@ function synonyms.finalize(data,options)
local split = { }
for k=1,#result do
local v = result[k]
- local entry, tag = sorters.firstofsplit(v)
+ local entry, tag = firstofsplit(v)
local s = split[entry] -- keeps track of change
+ local d
if not s then
- s = { tag = tag, data = { } }
+ d = { }
+ s = { tag = tag, data = d }
split[entry] = s
+ else
+ d = s.data
end
- s.data[#s.data+1] = v
+ d[#d+1] = v
end
data.result = split
end
@@ -154,24 +163,21 @@ end
-- for now, maybe at some point we will do a multipass or so
-- maybe pass the settings differently
+local ctx_synonymentry = context.synonymentry
+
function synonyms.flush(data,options)
local kind = data.metadata.kind -- hack, will be done better
- -- context[format("\\start%soutput",kind)]()
local result = data.result
local sorted = table.sortedkeys(result)
for k=1,#sorted do
local letter = sorted[k]
local sublist = result[letter]
local data = sublist.data
- -- context[format("\\start%ssection",kind)](sublist.tag)
for d=1,#data do
local entry = data[d].definition
- -- context[format("\\%sentry",kind)](d,entry.tag,entry.synonym,entry.meaning or "")
- context("\\%sentry{%s}{%s}{%s}{%s}",kind,d,entry.tag,entry.synonym,entry.meaning or "")
+ ctx_synonymentry(d,entry.tag,entry.synonym,entry.meaning or "")
end
- -- context[format("\\stop%ssection",kind)]()
end
- -- context[format("\\stop%soutput",kind)]()
data.result = nil
data.metadata.sorted = false
end
@@ -196,3 +202,8 @@ function synonyms.process(class,options)
end
end
+commands.registersynonym = synonyms.register
+commands.registerusedsynonym = synonyms.registerused
+commands.synonymmeaning = synonyms.meaning
+commands.synonymname = synonyms.synonym
+commands.processsynonyms = synonyms.process
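
The ctx_synonymentry rewrite above shows the other recurring pattern in this patch: a function from the context namespace is cached in a local and then called with plain arguments, instead of formatting the macro call as a string. A sketch with made-up arguments, assuming the \synonymentry macro from strc-syn.mkiv is defined:

    -- cache the namespace call once ...
    local ctx_synonymentry = context.synonymentry
    -- ... then call it per entry with plain arguments; this skips the string.format
    -- round trip and the per-call csname lookup of the old code
    ctx_synonymentry(1, "tex", "TeX", "a typesetting system")
    -- roughly equivalent to the removed:
    -- context("\\synonymentry{%s}{%s}{%s}{%s}", 1, "tex", "TeX", "a typesetting system")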
diff --git a/Master/texmf-dist/tex/context/base/strc-syn.mkiv b/Master/texmf-dist/tex/context/base/strc-syn.mkiv
index e0087d45008..73aca18e601 100644
--- a/Master/texmf-dist/tex/context/base/strc-syn.mkiv
+++ b/Master/texmf-dist/tex/context/base/strc-syn.mkiv
@@ -20,20 +20,6 @@
\unprotect
-\ifdefined\dotagsynonym \else \let\dotagsynonym\relax \fi
-\ifdefined\dotagsorting \else \let\dotagsorting\relax \fi
-
-% general help, can be shared
-
-% simplifiedcommands -> flag in lua
-%
-% expansion
-% criterium -> when start, then flag in list
-% command-> when?
-% state -> flagging enabled
-% conversion ?
-% todo: register xml mode etc
-
% split but common in lua
\def\preprocessexpansion#1#2#3#4%
@@ -51,13 +37,93 @@
\globallet#3\s!tex
\fi}
-\installcorenamespace{synonym}
+%D We now use a simple list variant:
+
+\installcorenamespace {simplelist}
+
+\installcommandhandler \??simplelist {simplelist} \??simplelist
+
+\let\setupsimplelists\setupsimplelist
+
+\setupsimplelists[%
+ %c!title=,
+ %c!text=,
+ %
+ %c!style=,
+ %c!color=,
+ %c!command=,
+ %c!align=,
+ %
+ %c!headstyle=,
+ %c!headcolor=,
+ %c!headalign=,
+ %
+ %c!titlestyle=,
+ %c!titlecolor=,
+ %c!titlecommand=,
+ %c!titleleft=,
+ %c!titleright=,
+ %
+ %c!closesymbol=,
+ %c!closecommand=,
+ %
+ \c!alternative=\v!left,
+ \c!display=\v!yes,
+ \c!width=7\emwidth,
+ \c!distance=\emwidth,
+ \c!titledistance=.5\emwidth,
+ %c!hang=,
+ %c!sample=,
+ \c!margin=\v!no,
+ \c!before=\blank,
+ \c!inbetween=\blank,
+ \c!after=\blank,
+ %c!indentnext=,
+ %c!indenting=,
+ %
+ \c!expansion=\v!no,
+ %c!xmlsetup=,
+ %s!catcodes=,
+ \s!language=\currentmainlanguage,
+]
+
+\appendtoks
+ \setfalse\c_strc_constructions_define_commands
+ \ifx\currentsimplelistparent\empty
+ \defineconstruction[\currentsimplelist][\s!handler=\v!simplelist,\c!level=1]%
+ \else
+ \defineconstruction[\currentsimplelist][\currentsimplelistparent][\s!handler=\v!simplelist,\c!level=1]%
+ \fi
+ \settrue\c_strc_constructions_define_commands
+\to \everydefinesimplelist
+
+\setuvalue{\??constructioninitializer\v!simplelist}%
+ {\let\currentsimplelist \currentconstruction
+ \let\constructionparameter \simplelistparameter
+ \let\detokenizedconstructionparameter\detokenizedsimplelistparameter
+ \let\letconstructionparameter \letsimplelistparameter
+ \let\useconstructionstyleandcolor \usesimpleliststyleandcolor
+ \let\setupcurrentconstruction \setupcurrentsimplelist}
+
+\setuvalue{\??constructionfinalizer\v!simplelist}%
+ {}
+
+\setuvalue{\??constructiontexthandler\v!simplelist}%
+ {\begingroup
+ \useconstructionstyleandcolor\c!headstyle\c!headcolor
+ \the\everyconstruction
+ \constructionparameter\c!headcommand
+ {\strut
+ \currentsimplelistentry}%
+ \endgroup}
-\installsimplecommandhandler \??synonym {synonym} \??synonym
+% And we build on top of this.
-\let\setupsynonyms\setupsynonym
+\ifdefined\dotagsynonym \else \let\dotagsynonym\relax \fi
+\ifdefined\dotagsorting \else \let\dotagsorting\relax \fi
-\setupsynonyms
+\definesimplelist
+ [\v!synonym]
[\c!state=\v!start,
%\c!synonymstyle=,
%\c!textstyle=,
@@ -75,50 +141,62 @@
%\c!after=,
\c!indentnext=\v!no,
%\c!expansion=,
- \c!method=,
- \s!language=\currentmainlanguage]
+ \c!method=]
+
+\let\setupsynonyms\setupsimplelist
\unexpanded\def\definesynonyms
- {\doquadrupleempty\dodefinesynonyms}
+ {\doquadrupleempty\strc_synonyms_define}
-\def\dodefinesynonyms[#1][#2][#3][#4]% name plural \meaning \use
+\def\strc_synonyms_define[#1][#2][#3][#4]% name plural \meaning \use
{\edef\currentsynonym{#1}%
\iffourthargument
- \unexpanded\def#4##1{\doinsertsynonym{#1}{##1}}% name tag
+ \unexpanded\def#4##1{\strc_synonyms_insert{#1}{##1}}% name tag
\ifthirdargument
- \unexpanded\def#3##1{\doinsertsynonymmeaning{#1}{##1}}% \meaning
+ \unexpanded\def#3##1{\strc_synonyms_insert_meaning{#1}{##1}}% \meaning
\fi
\setuvalue{#1}{\definesynonym[\v!no][#1]}% \name
\else
\ifthirdargument
- \unexpanded\def#3##1{\doinsertsynonymmeaning{#1}{##1}}% \meaning
+ \unexpanded\def#3##1{\strc_synonyms_insert_meaning{#1}{##1}}% \meaning
\fi
\setuvalue{#1}{\definesynonym[\v!yes][#1]}% \name
\fi
- \checksynonymparent
- \setupcurrentsynonym[\s!single={#1},\s!multi={#2}]%
+ %
+% \checksynonymparent
+% \setupcurrentsynonym[\s!single={#1},\s!multi={#2}]%
+ \setfalse\c_strc_constructions_define_commands
+ \definesimplelist
+ [\currentsynonym]%
+ [\v!sorting]
+ [\s!single={#1},%
+ \s!multi={#2}]%
+ \settrue\c_strc_constructions_define_commands
+ %
\presetheadtext[#2=\Word{#2}]% changes the \if...argument
+ %
\setvalue{\e!setup #2\e!endsetup}{\setupsynonym[#1]}% obsolete definition
\setvalue{\e!place \e!listof#2}{\placelistofsynonyms[#1]}% accepts extra argument
\setvalue{\e!complete\e!listof#2}{\completelistofsynonyms[#1]}}
\unexpanded\def\definesynonym
- {\dotripleempty\dodefinesynonym}
+ {\dotripleempty\strc_synonyms_define_entry}
-\def\dodefinesynonym[#1][#2][#3]#4#5%
+\def\strc_synonyms_define_entry[#1][#2][#3]#4#5%
{\begingroup
\edef\currentsynonym{#2}%
\edef\currentsynonymtag{#3}%
+ \let\currentsimplelist\currentsimplelist
\ifx\currentsynonymtag\empty
\edef\currentsynonymtag{#4}%
\fi
\ifx\currentsynonymtag\empty
% todo: error message
\else
- \edef\currentsynonymexpansion{\synonymparameter\c!expansion}%
+ \edef\currentsynonymexpansion{\simplelistparameter\c!expansion}%
\preprocessexpansion\currentsynonymexpansion\currentsynonymtext \currentsynonymcoding{#4}%
\preprocessexpansion\currentsynonymexpansion\currentsynonymmeaning\currentsynonymcoding{#5}%
- \ctxlua{structures.synonyms.register("\currentsynonym", "synonym", {
+ \ctxcommand{registersynonym("\currentsynonym", "synonym", {
metadata = {
catcodes = \the\catcodetable,
coding = "\currentsynonymcoding",
@@ -131,91 +209,77 @@
used = false,
}
})}%
- \doif{#1}\v!yes{\setuxvalue\currentsynonymtag{\noexpand\doinsertsynonym{\currentsynonym}{\currentsynonymtag}}}%
+ \doif{#1}\v!yes{\setuxvalue\currentsynonymtag{\strc_synonyms_insert{\currentsynonym}{\currentsynonymtag}}}%
\fi
\endgroup}
\unexpanded\def\registersynonym
- {\dodoubleargument\doregistersynonym}
+ {\dodoubleargument\strc_synonyms_register}
-\def\doregistersynonym[#1][#2]%
- {\ctxlua{structures.synonyms.registerused("#1","#2")}}
+\def\strc_synonyms_register[#1][#2]%
+ {\ctxcommand{registerusedsynonym("#1","#2")}}
-\unexpanded\def\doinsertsynonymmeaning#1#2% name tag
+\unexpanded\def\strc_synonyms_insert_meaning#1#2% name tag
{\begingroup
- \def\currentsynonym{#1}%
- \usesynonymstyleandcolor\c!textstyle\c!textcolor
- \synonymparameter\c!textcommand{\ctxlua{structures.synonyms.meaning("#1","#2")}}%
+ \def\currentsimplelist{#1}%
+ \usesimpleliststyleandcolor\c!textstyle\c!textcolor
+ \simplelistparameter\c!textcommand{\ctxcommand{synonymmeaning("#1","#2")}}%
\endgroup}
-\unexpanded\def\doinsertsynonym#1#2% name tag
+\unexpanded\def\strc_synonyms_insert#1#2% name tag
{\begingroup
- \def\currentsynonym{#1}%
+ \edef\currentsimplelist{#1}%
+ \let\currentsynonym\currentsimplelist % for a while
\def\currentsynonymtag{#2}%
\dostarttagged\t!synonym\currentsynonym
\dotagsynonym
- \usesynonymstyleandcolor\c!synonymstyle\c!synonymcolor
- \synonymparameter\c!synonymcommand{\ctxlua{structures.synonyms.synonym("#1","#2")}}%
+ \usesimpleliststyleandcolor\c!synonymstyle\c!synonymcolor
+ \simplelistparameter\c!synonymcommand{\ctxcommand{synonymname("#1","#2")}}%
\dostoptagged
- \normalexpanded{\endgroup\synonymparameter\c!next}}
+ \normalexpanded{\endgroup\simplelistparameter\c!next}}
\unexpanded\def\placelistofsynonyms
- {\dodoubleempty\doplacelistofsynonyms}
+ {\dodoubleempty\strc_synonyms_place_list}
-\def\doplacelistofsynonyms[#1][#2]%
+\def\strc_synonyms_place_list[#1][#2]%
{\begingroup
- \def\currentsynonym{#1}%
- \definedescription % todo, per class
- [syndef]
- [\c!location=\synonymparameter\c!location,
- \c!width=\synonymparameter\c!width,
- \c!distance=\synonymparameter\c!distance,
- \c!sample=\synonymparameter\c!sample,
- \c!hang=\synonymparameter\c!hang,
- \c!align=\synonymparameter\c!align,
- \c!before=\synonymparameter\c!before,
- \c!inbetween=\synonymparameter\c!inbetween,
- \c!after=\synonymparameter\c!after,
- \c!indentnext=\synonymparameter\c!indentnext,
- \c!headstyle=\synonymparameter\c!textstyle,
- \c!headcolor=\synonymparameter\c!textcolor,
- \c!style=,
- \c!color=.
- #2]%
+ \edef\currentsimplelist{#1}%
+ \strc_constructions_initialize{#1}%
+ \setupcurrentsimplelist[#2]%
+ \let\synonymentry\strc_synonym_normal
\startpacked
- \ctxlua{structures.synonyms.process('#1',{
- criterium = "\synonymparameter\c!criterium",
- language = "\synonymparameter\s!language",
- method = "\synonymparameter\c!method",
+ \ctxcommand{processsynonyms('#1',{
+ criterium = "\simplelistparameter\c!criterium",
+ language = "\simplelistparameter\s!language",
+ method = "\simplelistparameter\c!method",
})}%
\stoppacked
\endgroup}
\def\completelistofsynonyms
- {\dodoubleempty\docompletelistofsynonyms}
+ {\dodoubleempty\strc_synonyms_complete_list}
-\def\docompletelistofsynonyms[#1][#2]%
- {\edef\currentsynonym{#1}%
- \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\synonymparameter\s!multi}},\c!reference=#1]}%
- \doplacelistofsynonyms[#1][#2]%
+\def\strc_synonyms_complete_list[#1][#2]%
+ {\begingroup
+ \edef\currentsimplelist{#1}%
+ \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\simplelistparameter\s!multi}},\c!reference=#1]}%
+ \strc_synonyms_place_list[#1][#2]%
\page
- \stopnamedsection}
-
-\let\startsynonymoutput \relax
-\let\stopsynonymoutput \relax
-\let\startsynonymsection\gobbleoneargument
-\let\stopsynonymsection \relax
+ \stopnamedsection
+ \endgroup}
-\unexpanded\def\synonymentry#1#2#3#4%
- {\syndef{#3}#4\par}
+\unexpanded\def\strc_synonym_normal#1#2#3#4%
+ {\begingroup
+ \def\currentsimplelistentry{#3}%
+ \csname\??constructionstarthandler\v!construction\endcsname
+ #4%
+ \csname\??constructionstophandler\v!construction\endcsname
+ \endgroup}
%D Sorting (a simplified version of synonym).
-\installcorenamespace{sorting}
-
-\installsimplecommandhandler \??sorting {sorting} \??sorting
-
-\setupsorting
+\definesimplelist
+ [\v!sorting]
[\c!state=\v!start,
%\c!command=, % we test for defined !
%\c!criterium=,
@@ -223,48 +287,57 @@
%\c!before=,
\c!after=\endgraf,
%\c!expansion=,
- \c!method=,
- \s!language=\currentmainlanguage]
+ \c!method=]
+
+\let\setupsorting\setupsimplelist
\unexpanded\def\definesorting
- {\dotripleempty\dodefinesorting}
+ {\dotripleempty\strc_sorting_define}
% if #3=\relax or \v!none, then no command but still protected
-\def\dodefinesorting[#1][#2][#3]%
+\def\strc_sorting_define[#1][#2][#3]%
{\edef\currentsorting{#1}%
\ifthirdargument
\doifnot{#3}\v!none
{\ifx#3\relax \else
- \unexpanded\def#3##1{\doinsertsort{#1}{##1}}%
+ \unexpanded\def#3##1{\strc_sorting_insert{#1}{##1}}%
\fi}%
\setuvalue{#1}{\definesort[\v!no][#1]}%
\else
\setuvalue{#1}{\definesort[\v!yes][#1]}%
\fi
- \checksortingparent
- \setupcurrentsorting[\s!multi={#2}]%
+ \setfalse\c_strc_constructions_define_commands
+ \definesimplelist
+ [\currentsorting]%
+ [\v!sorting]
+ [\s!single={#1},%
+ \s!multi={#2}]%
+ \settrue\c_strc_constructions_define_commands
+ %
\presetheadtext[#2=\Word{#2}]% after \ifthirdargument -)
+ %
\setvalue{\e!setup #2\e!endsetup}{\setupsorting[#1]}% obsolete definition
\setvalue{\e!place \e!listof#2}{\placelistofsorts[#1]}%
\setvalue{\e!complete\e!listof#2}{\completelistofsorts[#1]}}
\unexpanded\def\definesort
- {\dotripleempty\dodefinesort}
+ {\dotripleempty\strc_sorting_define_entry}
-\def\dodefinesort[#1][#2][#3]#4%
+\def\strc_sorting_define_entry[#1][#2][#3]#4%
{\begingroup
\edef\currentsorting{#2}%
\edef\currentsortingtag{#3}%
+ \let\currentsimplelist\currentsimplelist
\ifx\currentsortingtag\empty
\edef\currentsortingtag{#4}%
\fi
\ifx\currentsortingtag\empty
% todo: error message
\else
- \edef\currentsortingexpansion{\sortingparameter\c!expansion}%
+ \edef\currentsortingexpansion{\simplelistparameter\c!expansion}%
\preprocessexpansion\currentsortingexpansion\currentsortingtext\currentsortingcoding{#4}%
- \ctxlua{structures.synonyms.register("\currentsorting", "sorting", {
+ \ctxcommand{registersynonym("\currentsorting", "sorting", {
metadata = {
catcodes = \the\catcodetable,
coding = "\currentsortingcoding",
@@ -276,67 +349,77 @@
% used = false,
}
})}%
- \doif{#1}\v!yes{\setuxvalue\currentsortingtag{\noexpand\doinsertsort{\currentsorting}{\currentsortingtag}}}%
+ \doif{#1}\v!yes{\setuxvalue\currentsortingtag{\strc_sorting_insert{\currentsorting}{\currentsortingtag}}}%
\fi
\endgroup}
-\unexpanded\def\doinsertsort#1#2% name tag
+\unexpanded\def\strc_sorting_insert#1#2% name tag
{\begingroup
% no kap currently, of .. we need to map cap onto WORD
\edef\currentsorting{#1}%
\def\currentsortingtag{#2}%
+ \let\currentsimplelist\currentsorting
\dostarttagged\t!sorting\currentsorting
\dotagsorting
- \usesortingstyleandcolor\c!style\c!color
- \ctxlua{structures.synonyms.synonym("#1","#2")}%
+ \usesimpleliststyleandcolor\c!style\c!color
+ \ctxcommand{synonymname("#1","#2")}%
\dostoptagged
- \normalexpanded{\endgroup\sortingparameter\c!next}}
+ \normalexpanded{\endgroup\simplelistparameter\c!next}}
\unexpanded\def\registersort
- {\dodoubleargument\doregistersort}
+ {\dodoubleargument\strc_sorting_register}
-\def\doregistersort[#1][#2]%
- {\ctxlua{structures.synonyms.registerused("#1","#2")}}
+\def\strc_sorting_register[#1][#2]%
+ {\ctxcommand{registerusedsynonym("#1","#2")}}
% before after
%
% maybe just 'commandset' and then combine
\unexpanded\def\placelistofsorts
- {\dodoubleempty\doplacelistofsorts}
+ {\dodoubleempty\strc_sorting_place_list}
-\def\doplacelistofsorts[#1][#2]% NOG EEN RUWE VERSIE MAKEN ZONDER WITRUIMTE ETC ETC
+\def\strc_sorting_place_list[#1][#2]%
{\begingroup
- \def\currentsorting{#1}%
- \setupcurrentsorting[#2]%
+ \edef\currentsimplelist{#1}%
+ \strc_constructions_initialize{#1}%
+ \setupcurrentsimplelist[#2]%
+ \edef\p_simplelist_command{\simplelistparameter\c!command}%
+ \ifx\p_simplelist_command\empty
+ \let\synonymentry\strc_sorting_normal
+ \else
+ \let\synonymentry\strc_sorting_command
+ \fi
\startpacked
- \ctxlua{structures.synonyms.process('#1',{
- criterium = "\sortingparameter\c!criterium",
- language = "\sortingparameter\s!language",
- method = "\sortingparameter\c!method",
+ \ctxcommand{processsynonyms('#1',{
+ criterium = "\simplelistparameter\c!criterium",
+ language = "\simplelistparameter\s!language",
+ method = "\simplelistparameter\c!method",
})}%
\stoppacked
\endgroup}
\unexpanded\def\completelistofsorts
- {\dodoubleempty\docompletelistofsorts}
+ {\dodoubleempty\strc_sorting_complete_list}
-\def\docompletelistofsorts[#1][#2]%
- {\edef\currentsorting{#1}%
- \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\sortingparameter\s!multi}},\c!reference=#1]}%
- \doplacelistofsorts[#1][#2]%
+\def\strc_sorting_complete_list[#1][#2]%
+ {\begingroup
+ \edef\currentsimplelist{#1}%
+ \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\simplelistparameter\s!multi}},\c!reference=#1]}%
+ \strc_sorting_place_list[#1][#2]%
\page
- \stopnamedsection}
+ \stopnamedsection
+ \endgroup}
-\let\startsortingoutput \relax
-\let\stopsortingoutput \relax
-\let\startsortingsection\gobbleoneargument
-\let\stopsortingsection \relax
+\def\strc_sorting_command#1#2#3#4% #4 is meaning but empty here
+ {\p_simplelist_command{#1}{#2}{#3}}
-\def\sortingentry#1#2#3#4% #4 is meaning but empty here
- {\doifelsenothing{\sortingparameter\c!command}
- {\begingroup\usesortingstyleandcolor\c!style\c!color#3\endgroup\par} % todo
- {\sortingparameter\c!command{#1}{#2}{#3}}}
+\def\strc_sorting_normal#1#2#3#4% #4 is meaning but empty here
+ {\begingroup
+ \usesimpleliststyleandcolor\c!style\c!color
+ #3%
+ \endgroup
+ \par}
%D Presets.
diff --git a/Master/texmf-dist/tex/context/base/supp-box.lua b/Master/texmf-dist/tex/context/base/supp-box.lua
index c7382834a61..3c5a3383d01 100644
--- a/Master/texmf-dist/tex/context/base/supp-box.lua
+++ b/Master/texmf-dist/tex/context/base/supp-box.lua
@@ -6,12 +6,16 @@ if not modules then modules = { } end modules ['supp-box'] = {
license = "see context related readme files"
}
--- this is preliminary code
+-- this is preliminary code, use insert_before etc
local report_hyphenation = logs.reporter("languages","hyphenation")
-local tex, node = tex, node
-local context, commands, nodes = context, commands, nodes
+local tex = tex
+local context = context
+local commands = commands
+local nodes = nodes
+
+local splitstring = string.split
local nodecodes = nodes.nodecodes
@@ -19,94 +23,230 @@ local disc_code = nodecodes.disc
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
-local new_penalty = nodes.pool.penalty
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattribute = nuts.getattribute
+local getbox = nuts.getbox
+
+local setfield = nuts.setfield
+local setbox = nuts.setbox
+
+local free_node = nuts.free
+local copy_list = nuts.copy_list
+local copy_node = nuts.copy
+local find_tail = nuts.tail
-local free_node = node.free
-local copynodelist = node.copy_list
-local copynode = node.copy
-local texbox = tex.box
+local listtoutf = nodes.listtoutf
-local function hyphenatedlist(list)
- while list do
- local id, next, prev = list.id, list.next, list.prev
+local nodepool = nuts.pool
+local new_penalty = nodepool.penalty
+local new_hlist = nodepool.hlist
+local new_glue = nodepool.glue
+
+local texget = tex.get
+
+local function hyphenatedlist(head)
+ local current = head and tonut(head)
+ while current do
+ local id = getid(current)
+ local next = getnext(current)
+ local prev = getprev(current)
if id == disc_code then
- local hyphen = list.pre
+ local hyphen = getfield(current,"pre")
if hyphen then
local penalty = new_penalty(-500)
- hyphen.next, penalty.prev = penalty, hyphen
- prev.next, next.prev = hyphen, penalty
- penalty.next, hyphen.prev = next, prev
- list.pre = nil
- free_node(list)
+ -- insert_after etc
+ setfield(hyphen,"next",penalty)
+ setfield(penalty,"prev",hyphen)
+ setfield(prev,"next",hyphen)
+ setfield(next,"prev", penalty)
+ setfield(penalty,"next",next)
+ setfield(hyphen,"prev",prev)
+ setfield(current,"pre",nil)
+ free_node(current)
end
elseif id == vlist_code or id == hlist_code then
- hyphenatedlist(list.list)
+ hyphenatedlist(getlist(current))
end
- list = next
+ current = next
end
end
commands.hyphenatedlist = hyphenatedlist
function commands.showhyphenatedinlist(list)
- report_hyphenation("show: %s",nodes.listtoutf(list,false,true))
+ report_hyphenation("show: %s",listtoutf(tonut(list),false,true))
end
local function checkedlist(list)
if type(list) == "number" then
- return texbox[list].list
+ return getlist(getbox(tonut(list)))
else
- return list
+ return tonut(list)
end
end
-local function applytochars(list,what,nested)
- local doaction = context[what or "ruledhbox"]
- local noaction = context
- local current = checkedlist(list)
+local function applytochars(current,doaction,noaction,nested)
while current do
- local id = current.id
+ local id = getid(current)
if nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytochars(current.list,what,nested)
+ applytochars(getlist(current),what,nested)
context.endhbox()
elseif id ~= glyph_code then
- noaction(copynode(current))
+ noaction(tonode(copy_node(current)))
else
- doaction(copynode(current))
+ doaction(tonode(copy_node(current)))
end
- current = current.next
+ current = getnext(current)
end
end
-local function applytowords(list,what,nested)
- local doaction = context[what or "ruledhbox"]
- local noaction = context
- local current = checkedlist(list)
+local function applytowords(current,doaction,noaction,nested)
local start
while current do
- local id = current.id
+ local id = getid(current)
if id == glue_code then
if start then
- doaction(copynodelist(start,current))
+ doaction(tonode(copy_list(start,current)))
start = nil
end
- noaction(copynode(current))
+ noaction(tonode(copy_node(current)))
elseif nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytowords(current.list,what,nested)
+ applytowords(getlist(current),what,nested)
context.egroup()
elseif not start then
start = current
end
- current = current.next
+ current = getnext(current)
end
if start then
- doaction(copynodelist(start))
+ doaction(tonode(copy_list(start)))
+ end
+end
+
+commands.applytochars = function(list,what,nested) applytochars(checkedlist(list),context[what or "ruledhbox"],context,nested) end
+commands.applytowords = function(list,what,nested) applytowords(checkedlist(list),context[what or "ruledhbox"],context,nested) end
+
+local split_char = lpeg.Ct(lpeg.C(1)^0)
+local split_word = lpeg.tsplitat(lpeg.patterns.space)
+local split_line = lpeg.tsplitat(lpeg.patterns.eol)
+
+function commands.processsplit(str,command,how,spaced)
+ how = how or "word"
+ if how == "char" then
+ local words = lpeg.match(split_char,str)
+ for i=1,#words do
+ local word = words[i]
+ if word == " " then
+ if spaced then
+ context.space()
+ end
+ elseif command then
+ context[command](word)
+ else
+ context(word)
+ end
+ end
+ elseif how == "word" then
+ local words = lpeg.match(split_word,str)
+ for i=1,#words do
+ local word = words[i]
+ if spaced and i > 1 then
+ context.space()
+ end
+ if command then
+ context[command](word)
+ else
+ context(word)
+ end
+ end
+ elseif how == "line" then
+ local words = lpeg.match(split_line,str)
+ for i=1,#words do
+ local word = words[i]
+ if spaced and i > 1 then
+ context.par()
+ end
+ if command then
+ context[command](word)
+ else
+ context(word)
+ end
+ end
+ else
+ context(str)
+ end
+end
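
A minimal standalone sketch (plain Lua plus lpeg, runnable with texlua) of the
splitting that processsplit relies on; lpeg.tsplitat and lpeg.patterns.space are
ConTeXt extensions, so they are approximated here with plain lpeg, and the
variable names are only illustrative:

    local lpeg = require("lpeg")
    local P, C, Ct = lpeg.P, lpeg.C, lpeg.Ct

    local split_char = Ct(C(1)^0)            -- one capture per byte, as above
    local space      = P(" ")
    local split_word = Ct(C((1-space)^0) * (space * C((1-space)^0))^0)

    local chars = lpeg.match(split_char,"ab c")            -- { "a", "b", " ", "c" }
    local words = lpeg.match(split_word,"some more words") -- { "some", "more", "words" }
    print(#chars,#words)                                   -- 4   3
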
+
+local a_vboxtohboxseparator = attributes.private("vboxtohboxseparator")
+
+function commands.vboxlisttohbox(original,target,inbetween)
+ local current = getlist(getbox(original))
+ local head = nil
+ local tail = nil
+ while current do
+ local id = getid(current)
+ local next = getnext(current)
+ if id == hlist_code then
+ local list = getlist(current)
+ if head then
+ if inbetween > 0 then
+ local n = new_glue(0,0,inbetween)
+ setfield(tail,"next",n)
+ setfield(n,"prev",tail)
+ tail = n
+ end
+ setfield(tail,"next",list)
+ setfield(list,"prev",tail)
+ else
+ head = list
+ end
+ tail = find_tail(list)
+ -- remove last separator
+ if getid(tail) == hlist_code and getattribute(tail,a_vboxtohboxseparator) == 1 then
+ local temp = tail
+ local prev = getprev(tail)
+ if next then
+ local list = getlist(tail)
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
+ setfield(tail,"list",nil)
+ tail = find_tail(list)
+ else
+ tail = prev
+ end
+ free_node(temp)
+ end
+ -- done
+ setfield(tail,"next",nil)
+ setfield(current,"list",nil)
+ end
+ current = next
end
+ local result = new_hlist()
+ setfield(result,"list",head)
+ setbox(target,result)
+end
+
+function commands.hboxtovbox(original)
+ local b = getbox(original)
+ local factor = texget("baselineskip").width / texget("hsize")
+ setfield(b,"depth",0)
+ setfield(b,"height",getfield(b,"width") * factor)
end
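
The factor above reproduces, on the Lua side, what the older \setvboxtohbox
macro (replaced further down in supp-box.mkiv) computed with dimen arithmetic:
the box keeps its natural width but gets the height its content would roughly
occupy once it is unhboxed and broken into lines again. A standalone sketch of
the arithmetic in plain Lua, with purely illustrative dimensions (not ConTeXt
defaults) and a made-up function name:

    local function fakedheight(width,baselineskip,hsize)
        return width * (baselineskip/hsize)     -- same computation as hboxtovbox
    end

    local pt           = 65536                  -- scaled points per point
    local baselineskip = 14.4 * pt
    local hsize        = 400  * pt
    local width        = 3 * hsize              -- an hbox holding about three lines of material

    print(string.format("%.1fpt",fakedheight(width,baselineskip,hsize)/pt)) --> 43.2pt, three baselineskips
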
-commands.applytochars = applytochars
-commands.applytowords = applytowords
+function commands.boxtostring(n)
+ context.puretext(nodes.toutf(tex.box[n].list)) -- helper is defined later
+end
diff --git a/Master/texmf-dist/tex/context/base/supp-box.mkiv b/Master/texmf-dist/tex/context/base/supp-box.mkiv
index 83309622261..66f373b72a7 100644
--- a/Master/texmf-dist/tex/context/base/supp-box.mkiv
+++ b/Master/texmf-dist/tex/context/base/supp-box.mkiv
@@ -1077,12 +1077,12 @@
%D \stoptyping
\def\dohyphenatednextbox
- {\ctxcommand{hyphenatedlist(tex.box[\number\nextbox])}%
+ {\ctxcommand{hyphenatedlist(tex.box[\number\nextbox].list)}%
\unhbox\nextbox}
-\unexpanded\def\hyphenatedword {\dowithnextboxcs\dohyphenatednextbox \hbox}
-\unexpanded\def\hyphenatedpar {\dowithnextboxcs\dohyphenatednextbox \hbox}
-\unexpanded\def\hyphenatedfile#1{\dowithnextboxcs\dohyphenatednextbox \hbox{\readfile{#1}\donothing\donothing}}
+\unexpanded\def\hyphenatedword {\dowithnextboxcs\dohyphenatednextbox\hbox}
+\unexpanded\def\hyphenatedpar {\dowithnextboxcs\dohyphenatednextbox\hbox}
+\unexpanded\def\hyphenatedfile#1{\dowithnextboxcs\dohyphenatednextbox\hbox{\readfile{#1}\donothing\donothing}}
%D \macros
%D {processtokens}
@@ -1346,11 +1346,11 @@
%D {processisolatedwords,processisolatedchars}
%D
%D \startbuffer
-%D \processisolatedchars{some more words} \ruledhbox \par
-%D \processisolatedchars{and some $x + y = z$ math} \ruledhbox \par
+%D \processisolatedchars{some more words} \ruledhbox \par
+%D \processisolatedchars{and some $x + y = z$ math} \ruledhbox \par
%D \processisolatedchars{and a \hbox{$x + y = z$}} \ruledhbox \par
-%D \processisolatedwords{some more words} \ruledhbox \par
-%D \processisolatedwords{and some $x + y = z$ math} \ruledhbox \par
+%D \processisolatedwords{some more words} \ruledhbox \par
+%D \processisolatedwords{and some $x + y = z$ math} \ruledhbox \par
%D \processisolatedwords{and a \hbox{$x + y = z$}} \ruledhbox \par
%D \stopbuffer
%D
@@ -1379,7 +1379,15 @@
\let\processword\relax
-%D The better variant:
+\unexpanded\def\applytosplitstringchar#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char")}}
+\unexpanded\def\applytosplitstringword#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word")}}
+\unexpanded\def\applytosplitstringline#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line")}}
+
+\unexpanded\def\applytosplitstringcharspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char",true)}}
+\unexpanded\def\applytosplitstringwordspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word",true)}}
+\unexpanded\def\applytosplitstringlinespaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line",true)}}
+
+%D A variant:
\unexpanded\def\applytocharacters#1%
{\dontleavehmode
@@ -1730,35 +1738,71 @@
%D These macros are used in reformatting footnotes, so they do
%D what they're meant for.
-\unexpanded\def\setvboxtohbox
- {\bgroup
- \ifdim\baselineskip<16pt \relax
- \scratchdimen\baselineskip
- \multiply\scratchdimen 1024
- \else
- \message{cropping \baselineskip to 16pt}%
- \scratchdimen\maxdimen
- \fi
- \divide\scratchdimen \hsize
- \multiply\scratchdimen 64
- \xdef\vboxtohboxfactor{\withoutpt\the\scratchdimen}%
- \egroup}
+\newdimen\vboxtohboxslack
+\newdimen\hboxestohboxslack
+
+% Create line and fake height of paragraph by messing with heights:
+% a nice hack by DEK himself.
+
+%\unexpanded\def\setvboxtohbox
+% {\bgroup
+% \ifdim\baselineskip<16pt \relax
+% \scratchdimen\baselineskip
+% \multiply\scratchdimen 1024
+% \else
+% \message{cropping \baselineskip to 16pt}%
+% \scratchdimen\maxdimen
+% \fi
+% \divide\scratchdimen \hsize
+% \multiply\scratchdimen 64
+% \xdef\vboxtohboxfactor{\withoutpt\the\scratchdimen}%
+% \egroup}
+%
+% \unexpanded\def\startvboxtohbox
+% {\bgroup
+% \setvboxtohbox
+% \setbox\scratchbox\hbox\bgroup}
+%
+% \unexpanded\def\stopvboxtohbox
+% {\ifcase\vboxtohboxslack\else\hskip\zeropoint\!!minus\vboxtohboxslack\fi
+% \egroup
+% \dp\scratchbox\zeropoint
+% \ht\scratchbox\vboxtohboxfactor\wd\scratchbox
+% \box\scratchbox
+% \egroup}
+
+% More modern:
+
+% \definesystemattribute[vboxtohboxseparator][public]
+
+%newbox\d_syst_boxes_vboxtohbox
+\newbox\d_syst_boxes_separator
+
+\unexpanded\def\startvboxtohboxseparator
+ {\setbox\d_syst_boxes_separator\hbox attr \vboxtohboxseparatorattribute\plusone\bgroup}
+
+\unexpanded\def\stopvboxtohboxseparator
+ {\egroup}
\unexpanded\def\startvboxtohbox
- {\bgroup
- \setvboxtohbox
- \setbox\scratchbox\hbox\bgroup}
+ {\begingroup
+ \setbox\scratchbox\hbox\bgroup}
\unexpanded\def\stopvboxtohbox
- {\egroup
- \dp\scratchbox\zeropoint
- \ht\scratchbox\vboxtohboxfactor\wd\scratchbox
+ {\ifvoid\d_syst_boxes_separator
+ \hskip\zeropoint\ifcase\vboxtohboxslack\else\s!minus\vboxtohboxslack\fi % we really need a skip
+ \else
+ \box\d_syst_boxes_separator
+ \fi
+ \egroup
+ \ctxcommand{hboxtovbox(\number\scratchbox)}%
\box\scratchbox
- \egroup}
+ \endgroup}
+
+% A possible reconstruction:
\unexpanded\def\convertvboxtohbox
- {\setvboxtohbox
- \makehboxofhboxes
+ {\makehboxofhboxes
\setbox0\hbox{\unhbox0 \removehboxes}%
\noindent\unhbox0\par}
@@ -1776,6 +1820,42 @@
{\removehboxes}\unhbox0
\fi}
+% And one special for notes:
+
+% \unexpanded\def\starthboxestohbox
+% {\bgroup
+% \beginofshapebox}
+%
+% \unexpanded\def\stophboxestohbox
+% {\endofshapebox
+% \doreshapebox
+% {\hbox\bgroup
+% \unhbox\shapebox
+% \ifcase\hboxestohboxslack\else\hskip\zeropoint\!!minus\hboxestohboxslack\fi
+% \egroup}%
+% \donothing
+% \donothing
+% \donothing % get rid of penalties etc
+% \innerflushshapebox
+% \convertvboxtohbox
+% \par
+% \egroup}
+
+% More modern:
+
+\unexpanded\def\starthboxestohbox
+ {\bgroup
+ \setbox\scratchbox\vbox\bgroup}
+
+\unexpanded\def\stophboxestohbox
+ {\egroup
+ \ctxcommand{vboxlisttohbox(\number\scratchbox,\number\nextbox,\number\dimexpr\hboxestohboxslack)}%
+ \dontleavehmode
+ \unhbox\nextbox
+ \removeunwantedspaces
+ \par
+ \egroup}
+
%D \macros
%D {unhhbox}
%D
@@ -2370,45 +2450,84 @@
%D \hbox{y:\foundbox{two}{a}} \par
%D \stoptyping
-\def\@@stackbox{@box@}
-\def\@@stacklst{@xob@}
+\installcorenamespace {stackbox}
+\installcorenamespace {stacklst}
\unexpanded\def\setstackbox#1#2%
- {\ifcsname\@@stackbox:#1:#2\endcsname\else
- \expandafter\newbox\csname\@@stackbox:#1:#2\endcsname
+ {\ifcsname\??stackbox#1:#2\endcsname\else
+ \expandafter\newbox\csname\??stackbox#1:#2\endcsname
\fi
- \global\setbox\csname\@@stackbox:#1:#2\endcsname\vbox}
+ \global\setbox\csname\??stackbox#1:#2\endcsname\vbox}
\unexpanded\def\initializeboxstack#1%
{\def\docommand##1{\setstackbox{#1}{##1}{}}%
- \ifcsname\@@stacklst#1\endcsname
- \processcommacommand[\getvalue{\@@stacklst#1}]\docommand
+ \ifcsname\??stacklst#1\endcsname
+ \processcommacommand[\getvalue{\??stacklst#1}]\docommand
\fi
- \letgvalueempty{\@@stacklst#1}}
+ \letgvalueempty{\??stacklst#1}}
\unexpanded\def\savebox#1#2% stack name
{% beware, \setxvalue defines the cs beforehand so we cannot use the
% test inside the { }
- \ifcsname\@@stacklst#1\endcsname
- \setxvalue{\@@stacklst#1}{\csname\@@stacklst#1\endcsname,#2}%
+ \ifcsname\??stacklst#1\endcsname
+ \setxvalue{\??stacklst#1}{\csname\??stacklst#1\endcsname,#2}%
\else
- \setxvalue{\@@stacklst#1}{#2}%
+ \setxvalue{\??stacklst#1}{#2}%
\fi
\setstackbox{#1}{#2}}
-\unexpanded\def\foundbox#1#2%
+\unexpanded\def\restorebox#1#2% unwrapped
+ {\ifcsname\??stackbox#1:#2\endcsname
+ \copy\csname\??stackbox#1:#2\endcsname
+ \else
+ \emptybox
+ \fi}
+
+\unexpanded\def\foundbox#1#2% wrapped
{\vbox
- {\ifcsname\@@stackbox:#1:#2\endcsname
- \copy\csname\@@stackbox:#1:#2\endcsname
+ {\ifcsname\??stackbox#1:#2\endcsname
+ \copy\csname\??stackbox#1:#2\endcsname
\fi}}
\unexpanded\def\doifboxelse#1#2#3#4%
- {\ifcsname\@@stackbox:#1:#2\endcsname
- \ifvoid\csname\@@stackbox:#1:#2\endcsname#4\else#3\fi
+ {\ifcsname\??stackbox#1:#2\endcsname
+ \ifvoid\csname\??stackbox#1:#2\endcsname#4\else#3\fi
\else
#4%
\fi}
+%D This one is cheaper (the above is no longer used that much):
+
+\installcorenamespace {boxstack}
+
+\newcount\c_syst_boxes_stack
+\let \b_syst_boxes_stack\relax
+
+\unexpanded\def\syst_boxes_stack_allocate
+ {\newbox\b_syst_boxes_stack
+ \expandafter\let\csname\??boxstack\number\c_syst_boxes_stack\endcsname\b_syst_boxes_stack}
+
+\unexpanded\def\syst_boxes_push#1#2%
+ {\global\advance\c_syst_boxes_stack\plusone
+ \expandafter\let\expandafter\b_syst_boxes_stack\csname\??boxstack\number\c_syst_boxes_stack\endcsname
+ \ifx\b_syst_boxes_stack\relax % cheaper than csname check as in most cases it's defined
+ \syst_boxes_stack_allocate
+ \fi
+ #1\setbox\b_syst_boxes_stack\box#2\relax}
+
+\unexpanded\def\syst_boxes_pop#1#2%
+ {#1\setbox#2\box\csname\??boxstack\number\c_syst_boxes_stack\endcsname
+ \global\advance\c_syst_boxes_stack\minusone}
+
+\unexpanded\def\localpushbox {\syst_boxes_push\relax}
+\unexpanded\def\localpopbox {\syst_boxes_pop \relax}
+
+\unexpanded\def\globalpushbox{\syst_boxes_push\global}
+\unexpanded\def\globalpopbox {\syst_boxes_pop \global}
+
+% \unexpanded\def\pushbox#1{\ctxcommand{pushbox(\number#1)}}
+% \unexpanded\def\popbox #1{\ctxcommand{popbox(\number#1)}}
+
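
The commented \pushbox/\popbox calls suggest a possible Lua-side counterpart.
Purely as an illustration of the discipline used above (a global depth counter
plus registers that are allocated once and then reused), a sketch in plain Lua
with made-up names, not ConTeXt's actual implementation:

    local registers = { }       -- stands in for the \??boxstack ... \newbox registers
    local depth     = 0

    local function pushbox(b)
        depth = depth + 1
        registers[depth] = b    -- slot 'depth' is created on first use, reused afterwards
    end

    local function popbox()
        local b = registers[depth]
        depth = depth - 1
        return b
    end

    pushbox("first") pushbox("second")
    print(popbox())             --> second
    print(popbox())             --> first
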
%D \macros
%D {removedepth, obeydepth}
%D
@@ -2449,16 +2568,17 @@
%D \macros
%D {makestrutofbox}
%D
-%D This macro sets the dimensions of a box to those of a strut.
+%D This macro sets the dimensions of a box to those of a strut. Sort of obsolete
+%D so it will go away.
-\def\domakestrutofbox
+\unexpanded\def\makestrutofbox % not used
+ {\afterassignment\syst_boxes_makestrutofbox\c_boxes_register}
+
+\def\syst_boxes_makestrutofbox
{\ht\c_boxes_register\strutht
\dp\c_boxes_register\strutdp
\wd\c_boxes_register\zeropoint}
-\unexpanded\def\makestrutofbox % not used
- {\afterassignment\domakestrutofbox\c_boxes_register}
-
%D \macros
%D {raisebox,lowerbox}
%D
@@ -2567,19 +2687,22 @@
%D \macros
%D {setboxllx,setboxlly,gsetboxllx,gsetboxlly,getboxllx,getboxlly}
%D
-%D A prelude to an extended \TEX:
+%D A prelude to an extended \TEX\ feature:
-\unexpanded\def\setboxllx #1#2{\expandafter\edef\csname boxes_x_\number#1\endcsname{\the\dimexpr#2\relax}}
-\unexpanded\def\setboxlly #1#2{\expandafter\edef\csname boxes_y_\number#1\endcsname{\the\dimexpr#2\relax}}
+\installcorenamespace {box_x}
+\installcorenamespace {box_y}
-\unexpanded\def\gsetboxllx#1#2{\expandafter\xdef\csname boxes_x_\number#1\endcsname{\the\dimexpr#2\relax}}
-\unexpanded\def\gsetboxlly#1#2{\expandafter\xdef\csname boxes_y_\number#1\endcsname{\the\dimexpr#2\relax}}
+\unexpanded\def\setboxllx #1#2{\expandafter\edef\csname\??box_x\number#1\endcsname{\the\dimexpr#2\relax}}
+\unexpanded\def\setboxlly #1#2{\expandafter\edef\csname\??box_y\number#1\endcsname{\the\dimexpr#2\relax}}
-\def\getboxllx#1{\ifcsname boxes_x_\number#1\endcsname\csname boxes_x_\number#1\endcsname\else\zeropoint\fi}
-\def\getboxlly#1{\ifcsname boxes_y_\number#1\endcsname\csname boxes_y_\number#1\endcsname\else\zeropoint\fi}
+\unexpanded\def\gsetboxllx#1#2{\expandafter\xdef\csname\??box_x\number#1\endcsname{\the\dimexpr#2\relax}}
+\unexpanded\def\gsetboxlly#1#2{\expandafter\xdef\csname\??box_y\number#1\endcsname{\the\dimexpr#2\relax}}
-\def\directgetboxllx#1{\csname boxes_x_\number#1\endcsname} % use when sure existence
-\def\directgetboxlly#1{\csname boxes_y_\number#1\endcsname} % use when sure existence
+\def\getboxllx#1{\ifcsname\??box_x\number#1\endcsname\csname\??box_x\number#1\endcsname\else\zeropoint\fi}
+\def\getboxlly#1{\ifcsname\??box_y\number#1\endcsname\csname\??box_y\number#1\endcsname\else\zeropoint\fi}
+
+\def\directgetboxllx#1{\csname\??box_x\number#1\endcsname} % use when sure existence
+\def\directgetboxlly#1{\csname\??box_y\number#1\endcsname} % use when sure existence
%D \macros
%D {shownextbox}
@@ -2647,18 +2770,31 @@
\egroup}
\hbox}
+%D A bit dirty:
+
+% \unexpanded\def\nodestostring#1% \cs {content}
+% {\dowithnextbox{\edef#1{\syst_boxes_nodestostring}}\hbox}
+%
+% \def\syst_boxes_nodestostring
+% {\ctxcommand{boxtostring(\number\nextbox)}}
+
+\unexpanded\def\nodestostring#1#2% more tolerant for #2=\cs
+ {\begingroup
+ \setbox\nextbox\hbox{#2}%
+ \normalexpanded{\endgroup\edef\noexpand#1{\ctxcommand{boxtostring(\number\nextbox)}}}}
+
\protect \endinput
% a bit of test code:
-\hbox \bgroup
- \ruledvbox {\hbox{\strut gans}}
- \ruledvbox to \lineheight {\hbox{\strut gans}}
- \ruledvbox to \lineheight {\hbox {gans}}
- \ruledvbox to \strutheight{\hbox {gans}}
- \ruledvbox to \strutheight{\hbox{\strut gans}}
- \ruledvbox to \strutheight{\vss\hbox{gans}}
-\egroup
+% \hbox \bgroup
+% \ruledvbox {\hbox{\strut gans}}
+% \ruledvbox to \lineheight {\hbox{\strut gans}}
+% \ruledvbox to \lineheight {\hbox {gans}}
+% \ruledvbox to \strutheight{\hbox {gans}}
+% \ruledvbox to \strutheight{\hbox{\strut gans}}
+% \ruledvbox to \strutheight{\vss\hbox{gans}}
+% \egroup
% to be considered
diff --git a/Master/texmf-dist/tex/context/base/supp-mat.mkiv b/Master/texmf-dist/tex/context/base/supp-mat.mkiv
index f77ee3454a3..925f25cc4c3 100644
--- a/Master/texmf-dist/tex/context/base/supp-mat.mkiv
+++ b/Master/texmf-dist/tex/context/base/supp-mat.mkiv
@@ -53,6 +53,36 @@
\let\normalstartdmath \Ustartdisplaymath
\let\normalstopdmath \Ustopdisplaymath
+% \unexpanded\def\Ustartdisplaymath
+% {\ifinner
+% \ifhmode
+% \normalUstartmath
+% \let\Ustopdisplaymath\normalUstopmath
+% \else
+% \normalUstartdisplaymath
+% \let\Ustopdisplaymath\normalUstopdisplaymath
+% \fi
+% \else
+% \normalUstartdisplaymath
+% \let\Ustopdisplaymath\normalUstopdisplaymath
+% \fi}
+
+\unexpanded\def\Ucheckedstartdisplaymath
+ {\ifinner
+ \ifhmode
+ \normalUstartmath
+ \let\Ucheckedstopdisplaymath\normalUstopmath
+ \else
+ \normalUstartdisplaymath
+ \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
+ \fi
+ \else
+ \normalUstartdisplaymath
+ \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
+ \fi}
+
+\let\Ucheckedstopdisplaymath\relax
+
\def\normalmathaligntab{&} % \let\normalmathaligntab\aligntab does to work well in a let to & (a def works ok)
\let\normalsuper \Usuperscript % obsolete
@@ -60,8 +90,8 @@
\let\startimath \Ustartmath
\let\stopimath \Ustopmath
-\let\startdmath \Ustartdisplaymath
-\let\stopdmath \Ustopmath
+\let\startdmath \Ustartdisplaymath % \Ucheckedstartdisplaymath
+\let\stopdmath \Ustopdisplaymath % \Ucheckedstopdisplaymath
\unexpanded\def\mathematics#1{\relax \ifmmode#1\else\normalstartimath#1\normalstopimath\fi}
\unexpanded\def\displaymath#1{\noindent \ifmmode#1\else\normalstartdmath#1\normalstopdmath\fi}
diff --git a/Master/texmf-dist/tex/context/base/symb-ini.lua b/Master/texmf-dist/tex/context/base/symb-ini.lua
index deeef667a67..9586338be12 100644
--- a/Master/texmf-dist/tex/context/base/symb-ini.lua
+++ b/Master/texmf-dist/tex/context/base/symb-ini.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['symb-ini'] = {
license = "see context related readme files"
}
+local context, commands = context, commands
local variables = interfaces.variables
diff --git a/Master/texmf-dist/tex/context/base/syst-aux.lua b/Master/texmf-dist/tex/context/base/syst-aux.lua
index b0fb8483b35..de15428f966 100644
--- a/Master/texmf-dist/tex/context/base/syst-aux.lua
+++ b/Master/texmf-dist/tex/context/base/syst-aux.lua
@@ -16,7 +16,8 @@ local commands, context = commands, context
local settings_to_array = utilities.parsers.settings_to_array
local format = string.format
local utfsub = utf.sub
-local P, C, Carg, lpegmatch, utf8char = lpeg.P, lpeg.C, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char
+local P, S, C, Cc, Cs, Carg, lpegmatch, utf8char = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char
+
local setvalue = context.setvalue
@@ -28,6 +29,15 @@ function commands.getfirstcharacter(str)
setvalue("remainingcharacters",rest)
end
+function commands.thefirstcharacter(str)
+ local first, rest = lpegmatch(pattern,str)
+ context(first)
+end
+function commands.theremainingcharacters(str)
+ local first, rest = lpegmatch(pattern,str)
+ context(rest)
+end
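
A standalone sketch (Lua 5.3 or a recent texlua, which provide the utf8
library) of the first/rest split that these two helpers expose to the TeX end,
here without lpeg and with an illustrative function name:

    local function firstandrest(str)
        local second = utf8.offset(str,2) or #str+1  -- byte index of the second character
        return str:sub(1,second-1), str:sub(second)
    end

    print(firstandrest("übergang"))                  --> ü    bergang
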
+
local pattern = C(utf8char^-1)
function commands.doiffirstcharelse(chr,str)
@@ -68,7 +78,7 @@ end
-- end
-- end
-local pattern = (C((1-P("%"))^1) * Carg(1)) /function(n,d) return format("%.0fsp",d * tonumber(n)/100) end * P("%") * P(-1)
+local pattern = (C((1-P("%"))^1) * Carg(1)) /function(n,d) return format("%.0fsp",d * tonumber(n)/100) end * P("%") * P(-1) -- .0 ?
-- commands.percentageof("10%",65536*10)
@@ -78,3 +88,29 @@ end
-- \gdef\setpercentdimen#1#2%
-- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
+
+local spaces = P(" ")^0/""
+
+local pattern = Cs(
+ ( P("global") / "\\global" )^0
+ * spaces
+ * ( P("unexpanded") / "\\unexpanded" )^0
+ * spaces
+ * Cc("\\expandafter\\")
+ * spaces
+ * ( P("expanded") / "e" )^0
+ * spaces
+ * ( P((1-S(" #"))^1) / "def\\csname %0\\endcsname" )
+ * spaces
+ * Cs( (P("##")/"#" + P(1))^0 )
+)
+
+function commands.thetexdefinition(str)
+ context(lpegmatch(pattern,str))
+end
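
A standalone sketch (plain Lua plus lpeg) that copies the rewrite above so it
can be tried outside ConTeXt; the second sample input matches the commented
\starttexdefinition examples further down:

    local lpeg = require("lpeg")
    local P, S, Cc, Cs = lpeg.P, lpeg.S, lpeg.Cc, lpeg.Cs

    local spaces  = P(" ")^0/""
    local rewrite = Cs(
          ( P("global")     / "\\global"     )^0 * spaces
        * ( P("unexpanded") / "\\unexpanded" )^0 * spaces
        * Cc("\\expandafter\\")                  * spaces
        * ( P("expanded")   / "e"            )^0 * spaces
        * ( P((1-S(" #"))^1) / "def\\csname %0\\endcsname" ) * spaces
        * Cs( (P("##")/"#" + P(1))^0 )
    )

    print(lpeg.match(rewrite,"test #1"))
    --> \expandafter\def\csname test\endcsname#1
    print(lpeg.match(rewrite,"global unexpanded expanded test #oeps"))
    --> \global\unexpanded\expandafter\edef\csname test\endcsname#oeps
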
+
+local upper, lower, strip = utf.upper, utf.lower, string.strip
+
+function commands.upper(s) context(upper(s)) end
+function commands.lower(s) context(lower(s)) end
+function commands.strip(s) context(strip(s)) end
diff --git a/Master/texmf-dist/tex/context/base/syst-aux.mkiv b/Master/texmf-dist/tex/context/base/syst-aux.mkiv
index 542b132ae9a..308e4b6fca5 100644
--- a/Master/texmf-dist/tex/context/base/syst-aux.mkiv
+++ b/Master/texmf-dist/tex/context/base/syst-aux.mkiv
@@ -128,10 +128,16 @@
\newif\if!!doned \newif\if!!donee \newif\if!!donef
\def\!!zerocount {0} % alongside \zerocount
-\def\!!minusone {-1} % alongside \minusone
-\def\!!plusone {1} % alongside \plusone
-\def\!!plustwo {2} % alongside \plustwo
-\def\!!plusthree {3} % alongside \plusthree
+\def\!!minusone {-1} % ...
+\def\!!plusone {1} % ...
+\def\!!plustwo {2} % ...
+\def\!!plusthree {3} % ...
+\def\!!plusfour {4} % ...
+\def\!!plusfive {5} % ...
+\def\!!plussix {6} % ...
+\def\!!plusseven {7} % ...
+\def\!!pluseight {8} % ...
+\def\!!plusnine {9} % alongside \plusnine
\setnewconstant \uprotationangle 0
\setnewconstant\rightrotationangle 90
@@ -346,6 +352,12 @@
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_bgroup_character}
+\unexpanded\def\doifnextbgroupcselse#1#2%
+ {\let\m_syst_action_yes#1%
+ \let\m_syst_action_nop#2%
+ \let\if_next_blank_space_token\iffalse
+ \futurelet\nexttoken\syst_helpers_inspect_next_bgroup_character}
+
\def\syst_helpers_inspect_next_bgroup_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_bgroup_character
@@ -401,7 +413,52 @@
\expandafter\m_syst_action_nop
\fi}
-%D This macro uses some auxiliary macros. Although we were able to program quite
+%D Here's one for skipping spaces and pars, handy for:
+%D
+%D \starttyping
+%D \hbox
+%D
+%D {a few lines later}
+%D \stoptyping
+
+% \unexpanded\def\assumelongusagecs#1%
+% {\let\m_syst_action#1%
+% \futurelet\nexttoken\syst_helpers_ignore_par_character}
+%
+% \def\syst_helpers_ignore_par_character
+% {\ifx\nexttoken\blankspace
+% \expandafter\syst_helpers_ignore_par_character_blankspace
+% \else
+% \expandafter\syst_helpers_ignore_par_character_followup
+% \fi}
+%
+% \def\syst_helpers_ignore_par_character_followup
+% {\ifx\nexttoken\par
+% \expandafter\syst_helpers_ignore_par_partoken
+% \else
+% \expandafter\m_syst_action
+% \fi}
+%
+% \def\syst_helpers_ignore_par_partoken
+% {\afterassignment\m_syst_action\let\nexttoken}
+
+\unexpanded\def\assumelongusagecs#1% can be relaxed when we have long support in \hbox etc
+ {\let\m_syst_action#1%
+ \futurelet\nexttoken\syst_helpers_ignore_spacing}
+
+\def\syst_helpers_ignore_spacing
+ {\ifx\nexttoken\blankspace
+ \singleexpandafter\syst_helpers_ignore_spacing_blankspace
+ \else\ifx\nexttoken\par
+ \doubleexpandafter\syst_helpers_ignore_spacing_partoken
+ \else
+ \doubleexpandafter\m_syst_action
+ \fi\fi}
+
+\def\syst_helpers_ignore_spacing_partoken\par
+ {\futurelet\nexttoken\syst_helpers_ignore_spacing}
+
+%D These macros use some auxiliary macros. Although we were able to program quite
%D complicated things, I only understood these after rereading the \TEX book. The
%D trick is in using a command with a one character name. Such commands differ from
%D the longer ones in the fact that trailing spaces are {\em not} skipped. This
@@ -429,6 +486,9 @@
\def\:{\syst_helpers_reinspect_next_parenthesis_character}
\expandafter\def\: {\let\if_next_blank_space_token\iftrue\futurelet\nexttoken\syst_helpers_inspect_next_parenthesis_character}
+\def\:{\syst_helpers_ignore_spacing_blankspace}
+\expandafter\def\: {\futurelet\nexttoken\syst_helpers_ignore_spacing}
+
\let\:\next
%D \macros
@@ -1243,8 +1303,13 @@
%D complicated arguments, for instance arguments that
%D consist of two or more expandable tokens.
-\def\getfirstcharacter #1{\ctxcommand{getfirstcharacter(\!!bs#1\!!es)}}
-\def\doiffirstcharelse#1#2{\ctxcommand{doiffirstcharelse(\!!bs#1\!!es,\!!bs#2\!!es)}} % chr str
+\let\firstcharacter \empty
+\let\remainingcharacters\empty
+
+\unexpanded\def\getfirstcharacter #1{\ctxcommand{getfirstcharacter(\!!bs#1\!!es)}}
+\unexpanded\def\doiffirstcharelse #1#2{\ctxcommand{doiffirstcharelse(\!!bs#1\!!es,\!!bs#2\!!es)}} % chr str
+\unexpanded\def\thefirstcharacter #1{\ctxcommand{thefirstcharacter(\!!bs#1\!!es)}}
+\unexpanded\def\theremainingcharacters#1{\ctxcommand{theremainingcharacters(\!!bs#1\!!es)}}
%D \macros
%D {doifinstringelse, doifincsnameelse}
@@ -2892,13 +2957,18 @@
%D
%D Trivial:
-\def\letempty #1{\let#1\empty}
-\def\globalletempty#1{\global\let#1\empty}
+\unexpanded\def\letempty #1{\let#1\empty}
+\unexpanded\def\globalletempty#1{\global\let#1\empty}
-\def\letvalueempty #1{\expandafter\let\csname#1\endcsname\empty}
-\def\letgvalueempty#1{\global\expandafter\let\csname#1\endcsname\empty}
-\def\letvaluerelax #1{\expandafter\let\csname#1\endcsname\relax}
-\def\letgvalurelax #1{\global\expandafter\let\csname#1\endcsname\relax}
+\unexpanded\def\letvalueempty #1{\expandafter\let\csname#1\endcsname\empty}
+\unexpanded\def\letgvalueempty#1{\global\expandafter\let\csname#1\endcsname\empty}
+\unexpanded\def\letvaluerelax #1{\expandafter\let\csname#1\endcsname\relax}
+\unexpanded\def\letgvalurelax #1{\global\expandafter\let\csname#1\endcsname\relax}
+
+\unexpanded\def\relaxvalueifundefined#1%
+ {\ifcsname#1\endcsname \else
+ \expandafter\let\csname#1\endcsname\relax
+ \fi}
%D \macros
%D {wait}
@@ -3122,6 +3192,63 @@
\def\s!unexpanded{unexpanded}
+% \bgroup \obeylines
+%
+% \global\let\stoptexdefinition\relax
+%
+% \unexpanded\gdef\starttexdefinition%
+% {\bgroup%
+% \obeylines%
+% \syst_helpers_start_tex_definition}
+%
+% \gdef\syst_helpers_start_tex_definition #1
+% {\catcode\endoflineasciicode\ignorecatcode%
+% \doifinstringelse\letterhash{\detokenize{#1}}\syst_helpers_start_tex_definition_yes\syst_helpers_start_tex_definition_nop#1
+% }
+%
+% \gdef\syst_helpers_start_tex_definition_yes#1 #2
+% {\edef\texdefinitionname{#1}%
+% \ifx\texdefinitionname\s!unexpanded%
+% \expandafter\syst_helpers_start_tex_definition_yes_unexpanded%
+% \else%
+% \expandafter\syst_helpers_start_tex_definition_yes_normal%
+% \fi%
+% {#1}#2
+% }
+%
+% \gdef\syst_helpers_start_tex_definition_yes_unexpanded#1#2 #3
+% #4\stoptexdefinition%
+% {\egroup% #1=unexpanded
+% \unexpanded\expandafter\def\csname#2\endcsname#3{#4}}
+%
+% \gdef\syst_helpers_start_tex_definition_yes_normal#1#2
+% #3\stoptexdefinition%
+% {\egroup%
+% \expandafter\def\csname#1\endcsname#2{#3}}
+%
+% \gdef\syst_helpers_start_tex_definition_nop#1
+% {\syst_helpers_start_tex_definition_nop_indeed{#1}{}}
+%
+% \gdef\syst_helpers_start_tex_definition_nop_indeed#1#2#3\stoptexdefinition%
+% {\egroup%
+% \expandafter\def\csname#1\endcsname{#3}}
+%
+% \egroup
+
+% \starttexdefinition unexpanded test #1
+% [here #1]
+% \stoptexdefinition
+%
+% \starttexdefinition global unexpanded test
+% [here test]
+% \stoptexdefinition
+%
+% \scratchcounter=123
+%
+% \starttexdefinition global unexpanded expanded test #oeps
+% [here #oeps: \the\scratchcounter]
+% \stoptexdefinition
+
\bgroup \obeylines
\global\let\stoptexdefinition\relax
@@ -3129,39 +3256,15 @@
\unexpanded\gdef\starttexdefinition%
{\bgroup%
\obeylines%
- \syst_helpers_start_tex_definition}
+ \syst_helpers_start_tex_definition_one}
-\gdef\syst_helpers_start_tex_definition #1
+\gdef\syst_helpers_start_tex_definition_one#1
{\catcode\endoflineasciicode\ignorecatcode%
- \doifinstringelse\letterhash{\detokenize{#1}}\syst_helpers_start_tex_definition_yes\syst_helpers_start_tex_definition_nop#1
- }
-
-\gdef\syst_helpers_start_tex_definition_yes#1 #2
- {\edef\texdefinitionname{#1}%
- \ifx\texdefinitionname\s!unexpanded%
- \expandafter\syst_helpers_start_tex_definition_yes_unexpanded%
- \else%
- \expandafter\syst_helpers_start_tex_definition_yes_normal%
- \fi%
- {#1}#2
- }
-
-\gdef\syst_helpers_start_tex_definition_yes_unexpanded#1#2 #3
- #4\stoptexdefinition%
- {\egroup% #1=unexpanded
- \unexpanded\expandafter\def\csname#2\endcsname#3{#4}}
-
-\gdef\syst_helpers_start_tex_definition_yes_normal#1#2
- #3\stoptexdefinition%
- {\egroup%
- \expandafter\def\csname#1\endcsname#2{#3}}
-
-\gdef\syst_helpers_start_tex_definition_nop#1
- {\syst_helpers_start_tex_definition_nop_indeed{#1}{}}
+ \syst_helpers_start_tex_definition_two{#1}}
-\gdef\syst_helpers_start_tex_definition_nop_indeed#1#2#3\stoptexdefinition%
+\gdef\syst_helpers_start_tex_definition_two#1#2\stoptexdefinition%
{\egroup%
- \expandafter\def\csname#1\endcsname{#3}}
+ \ctxcommand{thetexdefinition("#1")}{#2}}
\egroup
@@ -4481,7 +4584,6 @@
{\let\dodogotopar#1%
\redogotopar\par}
-
\unexpanded\def\GetPar
{\expanded
{\dowithpar
@@ -5380,7 +5482,7 @@
%D We have to use a two||step implementation, because the
%D expansion has to take place outside \type{\uppercase}.
%D
-%D These might up as \LUA based helpers (i.e. considere these
+%D These might end up as \LUA based helpers (i.e. consider these
%D obsolete:
\unexpanded\def\syst_helpers_do_IF#1#2%
@@ -5802,7 +5904,7 @@
\ifdefined\normalquitvmode \let\dontleavehmode\normalquitvmode \fi
%D \macros
-%D {uppercasestring,lowercasestring}
+%D {utfupper, utflower, uppercasestring, lowercasestring}
%D
%D The names tell what they do:
%D
@@ -5813,14 +5915,20 @@
%D
%D The first argument may be a \type{\macro}.
-\unexpanded\def\uppercasestring#1\to#2%
- {\uppercase\expandafter{\expandafter\dodoglobal\expandafter\edef\expandafter#2\expandafter{\normalexpanded{#1}}}}
-
-\unexpanded\def\lowercasestring#1\to#2%
- {\lowercase\expandafter{\expandafter\dodoglobal\expandafter\edef\expandafter#2\expandafter{\normalexpanded{#1}}}}
+% \unexpanded\def\uppercasestring#1\to#2%
+% {\uppercase\expandafter{\expandafter\dodoglobal\expandafter\edef\expandafter#2\expandafter{\normalexpanded{#1}}}}
+%
+% \unexpanded\def\lowercasestring#1\to#2%
+% {\lowercase\expandafter{\expandafter\dodoglobal\expandafter\edef\expandafter#2\expandafter{\normalexpanded{#1}}}}
%D These macros are sort of obsolete as we never use uppercase this
-%D way.
+%D way. But nevertheless we provide them:
+
+\def\utfupper#1{\ctxcommand{upper(\!!bs#1\!!es)}} % expandable
+\def\utflower#1{\ctxcommand{lower(\!!bs#1\!!es)}} % expandable
+
+\unexpanded\def\uppercasestring#1\to#2{\dodoglobal\edef#2{\ctxcommand{upper(\!!bs#1\!!es)}}}
+\unexpanded\def\lowercasestring#1\to#2{\dodoglobal\edef#2{\ctxcommand{lower(\!!bs#1\!!es)}}}
%D \macros
%D {handletokens}
@@ -6673,11 +6781,31 @@
%D
%D This one is for Mojca Miklavec, who made me aware of the fact that
%D \type {page-imp.tex} was not the best place to hide it.
+%D
+%D \startbuffer
+%D \def\DoSomething#1{ [item #1] }
+%D
+%D \processranges[1,4:5]\DoSomething \par
+%D \dowithrange {1,4:5}\DoSomething \par
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer \blank
+
+\def\syst_helpers_with_range#1%
+ {\splitstring#1\at:\to\m_syst_helpers_range_from\and\m_syst_helpers_range_to
+ \ifx\m_syst_helpers_range_to\empty\let\m_syst_helpers_range_to\m_syst_helpers_range_from\fi
+ \dostepwiserecurse\m_syst_helpers_range_from\m_syst_helpers_range_to\plusone{\m_helpers_range_action{##1}}}%
+
+\unexpanded\def\processranges[#1]#2% #1= n:m,p,q:r
+ {\def\m_helpers_range_action{#2}%
+ \processcommacommand[#1]\syst_helpers_with_range}
+
+\unexpanded\def\dowithrange#1#2%
+ {\def\m_helpers_range_action{#2}%
+ \processcommacommand[#1]\syst_helpers_with_range}
-\unexpanded\def\dowithrange#1#2% #2 takes number
- {\splitstring#1\at:\to\fromrange\and\torange
- \ifx\torange\empty\let\torange\fromrange\fi
- \dostepwiserecurse\fromrange\torange1{#2{\recurselevel}}}
+% \def\DoSomething#1{ [item #1] }
+% \dowithrange[1,4:5]\DoSomething
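
A standalone sketch (plain Lua, illustrative function name) of the range
expansion that \processranges and \dowithrange perform: the specification is
split at commas and each item is either a single number or a from:to pair.

    local function expandranges(spec)
        local result = { }
        for item in spec:gmatch("[^,]+") do
            local from, to = item:match("^(%d+):(%d+)$")
            from = tonumber(from) or tonumber(item)
            to   = tonumber(to)   or from
            for i=from,to do
                result[#result+1] = i
            end
        end
        return result
    end

    print(table.concat(expandranges("1,4:5"),","))   --> 1,4,5
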
%D \macros
%D {ignoreimplicitspaces}
diff --git a/Master/texmf-dist/tex/context/base/syst-con.lua b/Master/texmf-dist/tex/context/base/syst-con.lua
index 48f02da3a5d..dfbd490518e 100644
--- a/Master/texmf-dist/tex/context/base/syst-con.lua
+++ b/Master/texmf-dist/tex/context/base/syst-con.lua
@@ -6,29 +6,39 @@ if not modules then modules = { } end modules ['syst-con'] = {
license = "see context related readme files"
}
-converters = converters or { }
+local tonumber = tonumber
+local utfchar = utf.char
+local gsub, format = string.gsub, string.format
+
+converters = converters or { }
+local converters = converters
+
+local context = context
+local commands = commands
+
+local formatters = string.formatters
--[[ldx--
For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at
the top of 's char range but outside the unicode range.
--ldx]]--
-local tonumber = tonumber
-local utfchar = utf.char
-local gsub, format = string.gsub, string.format
+function converters.hexstringtonumber(n) tonumber(n,16) end
+function converters.octstringtonumber(n) tonumber(n, 8) end
-function converters.hexstringtonumber(n) tonumber(n,16) end
-function converters.octstringtonumber(n) tonumber(n, 8) end
function converters.rawcharacter (n) utfchar(0x110000+n) end
-function converters.lchexnumber (n) format("%x" ,n) end
-function converters.uchexnumber (n) format("%X" ,n) end
-function converters.lchexnumbers (n) format("%02x",n) end
-function converters.uchexnumbers (n) format("%02X",n) end
-function converters.octnumber (n) format("%03o",n) end
+
+converters.lchexnumber = formatters["%x" ]
+converters.uchexnumber = formatters["%X" ]
+converters.lchexnumbers = formatters["%02x"]
+converters.uchexnumbers = formatters["%02X"]
+converters.octnumber = formatters["%03o"]
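
ConTeXt's string.formatters["..."] essentially hands back a (cached) formatting
function, so the assignments above give the converters table ready-made
formatters. A rough stand-in with plain string.format, illustrative only:

    local function formatter(fmt)
        return function(...) return string.format(fmt,...) end
    end

    local lchexnumber = formatter("%x")
    local octnumber   = formatter("%03o")

    print(lchexnumber(255),octnumber(8))   --> ff   010
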
function commands.hexstringtonumber(n) context(tonumber(n,16)) end
function commands.octstringtonumber(n) context(tonumber(n, 8)) end
+
function commands.rawcharacter (n) context(utfchar(0x110000+n)) end
+
function commands.lchexnumber (n) context("%x" ,n) end
function commands.uchexnumber (n) context("%X" ,n) end
function commands.lchexnumbers (n) context("%02x",n) end
@@ -53,10 +63,10 @@ local cos, sin, tan = math.cos, math.sin, math.tan
-- function commands.cos (n) context(cos (n)) end
-- function commands.tan (n) context(tan (n)) end
-function commands.sind(n) context("%0.6f",sind(n)) end
-function commands.cosd(n) context("%0.6f",cosd(n)) end
-function commands.tand(n) context("%0.6f",tand(n)) end
+function commands.sind(n) context("%0.6F",sind(n)) end
+function commands.cosd(n) context("%0.6F",cosd(n)) end
+function commands.tand(n) context("%0.6F",tand(n)) end
-function commands.sin (n) context("%0.6f",sin (n)) end
-function commands.cos (n) context("%0.6f",cos (n)) end
-function commands.tan (n) context("%0.6f",tan (n)) end
+function commands.sin (n) context("%0.6F",sin (n)) end
+function commands.cos (n) context("%0.6F",cos (n)) end
+function commands.tan (n) context("%0.6F",tan (n)) end
diff --git a/Master/texmf-dist/tex/context/base/syst-ini.mkiv b/Master/texmf-dist/tex/context/base/syst-ini.mkiv
index ab1c53131de..fda873d3c5d 100644
--- a/Master/texmf-dist/tex/context/base/syst-ini.mkiv
+++ b/Master/texmf-dist/tex/context/base/syst-ini.mkiv
@@ -246,9 +246,10 @@
% Watch out, for the moment we disable the check for already being defined
% later we will revert this but first all chardefs must be replaced.
-\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
-\normalprotected\def\setnewconstant#1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
-\normalprotected\def\setconstant {} % dummy, no checking, so it warns
+\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
+\normalprotected\def\setnewconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
+\normalprotected\def\setconstant {} % dummy, no checking, so it warns
+\normalprotected\def\setconstantvalue#1#2{\csname#1\endcsname\numexpr#2\relax}
% maybe setconstant with check
@@ -300,7 +301,7 @@
%D 128-1023 are private and should not be touched.
\let\attributeunsetvalue\c_syst_min_counter_value % used to be \minusone
-\normalprotected\def\newattribute{\syst_basics_allocate\c_syst_min_allocated_attribute\attribute\attributedef\c_syst_max_allocated_register}
+\normalprotected\def\newattribute{\syst_basics_allocate\c_syst_last_allocated_attribute\attribute\attributedef\c_syst_max_allocated_register}
%D Not used by \CONTEXT\ but for instance \PICTEX\ needs it. It's a trick to force
%D strings instead of tokens that take more memory. It's a trick to trick to force
@@ -744,6 +745,9 @@
\normalprotected\def\settrue #1{\let#1\conditionaltrue }
\normalprotected\def\setfalse#1{\let#1\conditionalfalse}
+\normalprotected\def\settruevalue #1{\expandafter\let\csname#1\endcsname\conditionaltrue }
+\normalprotected\def\setfalsevalue#1{\expandafter\let\csname#1\endcsname\conditionalfalse}
+
\let\newconditional\setfalse
\let\ifconditional \ifcase
diff --git a/Master/texmf-dist/tex/context/base/syst-lua.lua b/Master/texmf-dist/tex/context/base/syst-lua.lua
index ef524c339fa..cd7dcc062c5 100644
--- a/Master/texmf-dist/tex/context/base/syst-lua.lua
+++ b/Master/texmf-dist/tex/context/base/syst-lua.lua
@@ -10,43 +10,44 @@ local format, find, match, rep = string.format, string.find, string.match, strin
local tonumber = tonumber
local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat
-local context = context
+commands = commands or { }
+local commands = commands
-commands = commands or { }
+local context = context
function commands.writestatus(...) logs.status(...) end -- overloaded later
-local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
-local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
-local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
-local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
+local ctx_firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
+local ctx_secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
+local ctx_firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
+local ctx_gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
--- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments
--- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments
--- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments
--- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments
+-- contextsprint(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
+-- contextsprint(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
+-- contextsprint(prtcatcodes,[[\ui_ft]]) -- ctx_firstoftwoarguments
+-- contextsprint(prtcatcodes,[[\ui_st]]) -- ctx_secondoftwoarguments
function commands.doifelse(b)
if b then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
end
function commands.doif(b)
if b then
- firstofoneargument()
+ ctx_firstofoneargument()
else
- gobbleoneargument()
+ ctx_gobbleoneargument()
end
end
function commands.doifnot(b)
if b then
- gobbleoneargument()
+ ctx_gobbleoneargument()
else
- firstofoneargument()
+ ctx_firstofoneargument()
end
end
@@ -58,9 +59,9 @@ end
function commands.doifelsespaces(str)
if find(str,"^ +$") then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
end
@@ -83,12 +84,12 @@ function commands.doifcommonelse(a,b) -- often the same test
for i=1,na do
for j=1,nb do
if ha[i] == hb[j] then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
return
end
end
end
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
function commands.doifinsetelse(a,b)
@@ -96,20 +97,20 @@ function commands.doifinsetelse(a,b)
if not hb then hb = lpegmatch(s,b) h[b] = hb end
for i=1,#hb do
if a == hb[i] then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
return
end
end
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
local pattern = lpeg.patterns.validdimen
function commands.doifdimenstringelse(str)
if lpegmatch(pattern,str) then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
end
diff --git a/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv b/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv
index 57134cb53aa..3734e564768 100644
--- a/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-ntb.mkiv
@@ -842,6 +842,7 @@
{\tabl_ntb_table_push
% box not here
\bgroup
+ \pushpostponednodedata
\t_tabl_ntb_head\emptytoks
\t_tabl_ntb_next\emptytoks
\t_tabl_ntb_body\emptytoks
@@ -952,6 +953,7 @@
% \par}%
% \blank
% \fi
+ \poppostponednodedata
\egroup
\tabl_ntb_table_pop}
@@ -1498,7 +1500,6 @@
\fi
\fi}
-
\def\tabl_ntb_check_heights_one
{\dorecurse\c_tabl_ntb_maximum_row
{\c_tabl_ntb_current_row_three\recurselevel\relax
@@ -1517,18 +1518,30 @@
{\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
[\recurselevel:\the\scratchdimen]}}}
+% \def\tabl_ntb_char_align
+% {\doifelse{\naturaltablelocalparameter\c!aligncharacter}\v!yes
+% \tabl_ntb_char_align_indeed\gobbletwoarguments}
+
+% \def\tabl_ntb_char_align_indeed#1#2#3% row column data
+% {\edef\alignmentclass{#2}%
+% \edef\alignmentcharacter{\naturaltablelocalparameter\c!alignmentcharacter}%
+% \ifcase\c_tabl_tbl_pass\or
+% \setfirstpasscharacteralign\checkalignment{#3}% {\strut#2\unskip}%
+% \fi % force hsize, so always a second
+% \setsecondpasscharacteralign \checkalignment{#3}% {\strut#2\unskip}%
+% \ignorespaces}
+
\def\tabl_ntb_char_align
{\doifelse{\naturaltablelocalparameter\c!aligncharacter}\v!yes
- \tabl_ntb_char_align_indeed\gobbleoneargument}
+ \tabl_ntb_char_align_indeed
+ \gobbletwoarguments}
-\def\tabl_ntb_char_align_indeed#1#2% column data
- {\edef\alignmentclass{#1}%
- \edef\alignmentcharacter{\naturaltablelocalparameter\c!alignmentcharacter}%
- \ifcase\c_tabl_tbl_pass\or
- \setfirstpasscharacteralign\checkalignment{#2}% {\strut#2\unskip}%
- \fi % force hsize, so always a second
- \setsecondpasscharacteralign \checkalignment{#2}% {\strut#2\unskip}%
- \ignorespaces}
+\def\tabl_ntb_char_align_indeed#1#2% row column
+ {\ifcase\c_tabl_tbl_pass \or
+ \setcharacteralign{#2}{\naturaltablelocalparameter\c!alignmentcharacter}%
+ \fi
+ \typo_charalign_adapt_font
+ \signalcharacteralign{#2}{#1}}
\unexpanded\def\tabl_ntb_cell_process_a#1#2[#3]#4% grouping added ! ! !
{\bgroup
@@ -1539,7 +1552,7 @@
\tabl_ntb_set_dis{#2}{\the\scratchdimen}%
\fi
\setupcurrentnaturaltablelocal[#3,\c!background=,\c!frame=\v!off]% 25% faster
- \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_ntb_cell_stop\tabl_ntb_cell_finalize}}%
+ \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop\tabl_ntb_cell_finalize}}%
\scratchdimen\tabl_ntb_get_wid\c_tabl_ntb_col\relax
\ifdim\wd\scratchbox>\scratchdimen
\ifsqueezeTBLspan
@@ -1625,7 +1638,7 @@
\fi
\fi
\normalexpanded{\tabl_ntb_cell_process_b_c{\ifdim\scratchdimen>\zeropoint \c!width=\the\scratchdimen\fi}}%
- {#1}{#2}[#3]{\tabl_ntb_char_align{#2}{#4}}}
+ {#1}{#2}[#3]{\tabl_ntb_char_align{#1}{#2}#4}}
\unexpanded\def\tabl_ntb_cell_process_c
{\tabl_ntb_cell_process_b_c{}}
@@ -1634,7 +1647,7 @@
{\tabl_ntb_setup_cell{#1}{#2}%
\bgroup
\setupcurrentnaturaltablelocal[#3,\c!width=\d_tabl_ntb_width,\c!background=,\c!frame=\v!off]% 25% faster
- \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_ntb_cell_stop}%
+ \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}%
\egroup}
\unexpanded\def\tabl_ntb_cell_process_e#1#2[#3]#4%
@@ -1647,13 +1660,13 @@
\else
\setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width,\c!height=\d_tabl_ntb_height]%
\fi
- \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_ntb_cell_stop}}%
+ \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}}%
\hskip\tabl_ntb_get_dis{#2}}
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- \c!framecolor=\s!black,
+ % \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
@@ -1705,7 +1718,7 @@
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- \c!framecolor=\s!black,
+ % \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
diff --git a/Master/texmf-dist/tex/context/base/tabl-nte.mkiv b/Master/texmf-dist/tex/context/base/tabl-nte.mkiv
index 4a9774cb0f7..af74a2abe9b 100644
--- a/Master/texmf-dist/tex/context/base/tabl-nte.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-nte.mkiv
@@ -102,9 +102,9 @@
\unexpanded\def\startTABLEbody{\dosingleempty\tabl_nte_start_body} \let\stopTABLEbody\relax
\unexpanded\def\startTABLEfoot{\dosingleempty\tabl_nte_start_foot} \let\stopTABLEfoot\relax
-\def\tabl_nte_start_head[#1]#2\stopTABLEhead{\appendtoks\doTABLEsection[#1]{#2}\to\TBLhead}
-\def\tabl_nte_start_next[#1]#2\stopTABLEnext{\appendtoks\doTABLEsection[#1]{#2}\to\TBLnext}
-\def\tabl_nte_start_body[#1]#2\stopTABLEbody{\appendtoks\doTABLEsection[#1]{#2}\to\TBLbody}
-\def\tabl_nte_start_foot[#1]#2\stopTABLEfoot{\appendtoks\doTABLEsection[#1]{#2}\to\TBLfoot}
+\def\tabl_nte_start_head[#1]#2\stopTABLEhead{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_head}
+\def\tabl_nte_start_next[#1]#2\stopTABLEnext{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_next}
+\def\tabl_nte_start_body[#1]#2\stopTABLEbody{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_body}
+\def\tabl_nte_start_foot[#1]#2\stopTABLEfoot{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_foot}
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/tabl-tbl.lua b/Master/texmf-dist/tex/context/base/tabl-tbl.lua
index 19548e7b3de..b088a10086e 100644
--- a/Master/texmf-dist/tex/context/base/tabl-tbl.lua
+++ b/Master/texmf-dist/tex/context/base/tabl-tbl.lua
@@ -9,21 +9,25 @@ if not modules then modules = { } end modules ['tabl-tbl'] = {
-- A couple of hacks ... easier to do in Lua than in regular TeX. More will
-- follow.
-local context, commands = context, commands
-
local tonumber = tonumber
local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find
local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match
-local settexcount = tex.setcount
+local context = context
+local commands = commands
+
+local texsetcount = tex.setcount
+
+local separator = P("|")
+local nested = lpeg.patterns.nested
+local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
-local separator = P("|")
-local nested = lpeg.patterns.nested
-local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
+local ctx_settabulatelastentry = context.settabulatelastentry
+local ctx_settabulateentry = context.settabulateentry
function commands.presettabulate(preamble)
preamble = gsub(preamble,"~","d") -- let's get rid of ~ mess here
- if find(preamble,"%*") then
+ if find(preamble,"*",1,true) then
-- todo: lpeg but not now
preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p)
return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1)
@@ -31,11 +35,11 @@ function commands.presettabulate(preamble)
end
local t = lpegmatch(pattern,preamble)
local m = #t - 2
- settexcount("global","c_tabl_tabulate_nofcolumns", m/2)
- settexcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1)
- settexcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1)
+ texsetcount("global","c_tabl_tabulate_nofcolumns", m/2)
+ texsetcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1)
+ texsetcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1)
for i=1,m,2 do
- context.settabulateentry(t[i],t[i+1])
+ ctx_settabulateentry(t[i],t[i+1])
end
- context.settabulatelastentry(t[m+1])
+ ctx_settabulatelastentry(t[m+1])
end
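
The lpeg above just splits a tabulate preamble into per-column (brace group, key) pairs plus a
trailing rule-spec slot. A minimal sketch of what it produces, assuming lpeg.patterns.nested
matches one balanced {...} group as in ConTeXt's l-lpeg helpers (the test string is hypothetical):

    local P, C, Cc, Ct = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct
    local nested    = lpeg.patterns.nested
    local separator = P("|")
    local pattern   = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)

    local t = lpeg.match(pattern,"|l|p(2cm)|r|")
    -- t == { "", "l", "", "p(2cm)", "", "r", "", "" }
    -- so m = #t - 2 = 6 and m/2 = 3 columns; t[1] and t[m+1] are the (here empty)
    -- rule specs before the first and after the last bar
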
diff --git a/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv b/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv
index 2fa8c48052a..1aeaa2e56ff 100644
--- a/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv
+++ b/Master/texmf-dist/tex/context/base/tabl-tbl.mkiv
@@ -48,6 +48,7 @@
% p p(dimen) or automatically when just p
% w column width
% f font#1
+% A {alignmentoptions}
% B bold
% I italic
% S slanted
@@ -231,7 +232,8 @@
\unexpanded\def\tolerantTABLEbreaktrue {\settrue \c_tabl_tabulate_tolerant_break} % used in styles !
\unexpanded\def\handletabulatepbreakfalse{\setfalse\c_tabl_tabulate_handlepbreak } % deprecated
-\installcorenamespace{tabulatealign}
+\def\noftabulaterows{\number\c_tabl_tabulate_noflines} % handy for testing if a table is empty
+
\installcorenamespace{tabulatebox}
\installcorenamespace{tabulatesetup}
\installcorenamespace{tabulatehook}
@@ -296,14 +298,6 @@
% [|lg{.}|] => \NG 12.34 \NC
-\def\tabl_tabulate_charalign#1 % space delimited ! (will be redone in lua)
- {\edef\alignmentclass{\the\c_tabl_tabulate_column}%
- \edef\alignmentcharacter{\csname\??tabulatealign\the\c_tabl_tabulate_column\endcsname}%
- \ifcase\c_tabl_tabulate_pass\or
- \setfirstpasscharacteralign\checkalignment{#1}%
- \fi % force hsize
- \setsecondpasscharacteralign\checkalignment{#1}}
-
\def\tabl_tabulate_nobreak_inject_tracer
{\red % maybe use the fast color switcher here
\hrule\s!height.5\linewidth\s!depth.5\linewidth
@@ -424,6 +418,8 @@
\let\tabl_tabulate_hook_b\donothing
\let\tabl_tabulate_hook_e\donothing
+\let\tabl_tabulate_hook_g\donothing
+
\def\tabl_tabulate_set_preamble_step#1#2% only makes sense for many tabulates
{\normalexpanded{\t_tabl_tabulate_preamble{\the\t_tabl_tabulate_preamble
\tabl_tabulate_check_local_vrule_thickness\constantdimenargument\d_tabl_tabulate_vrulethickness
@@ -433,12 +429,14 @@
\aligntab
\tabl_tabulate_column_vrule_inject
\tabl_tabulate_color_side_left
- \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
+% \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
+ \tabl_tabulate_inject_pre_skip{\the\dimexpr\s_tabl_tabulate_pre}% get rid of plus
\alignmark\alignmark
\aligntab
\tabl_tabulate_color_side_both
\global\c_tabl_tabulate_colorspan\zerocount
\global\c_tabl_tabulate_column\constantnumber\c_tabl_tabulate_columns
+ \tabl_tabulate_hook_g
\tabl_tabulate_setups_check % unexpandable
\tabl_tabulate_hook_check % unexpandable
\ifzeropt\d_tabl_tabulate_width
@@ -458,6 +456,9 @@
\bgroup % we cannot combine the if because a cell may have only one ##
\tabl_tabulate_hook_b
\c_tabl_tabulate_align\constantnumber\c_tabl_tabulate_align % needed in tag passing
+ \ifx\m_tabl_tabulate_alignment\empty \else
+ \spac_align_use_now{\m_tabl_tabulate_alignment}%
+ \fi
\noexpand\dostarttagged\noexpand\t!tabulatecell\noexpand\empty
\noexpand\dotagtabulatecell
\noexpand#1%
@@ -496,7 +497,7 @@
\egroup
\aligntab
\noexpand\dostoptagged
- \tabl_tabulate_inject_post_skip{\the\s_tabl_tabulate_post}%
+ \tabl_tabulate_inject_post_skip{\the\dimexpr\s_tabl_tabulate_post}% get rid of plus
\alignmark\alignmark
}}%
\t_tabl_tabulate_dummy\expandafter{\the\t_tabl_tabulate_dummy\NC}%
@@ -554,6 +555,7 @@
\installtabulatepreambleoption{d}{\t_tabl_tabulate_settings\expandafter{\the\t_tabl_tabulate_settings\fixedspaces}%
\tabl_tabulate_set_preamble}
\installtabulatepreambleoption{ }{\tabl_tabulate_set_preamble}
+\installtabulatepreambleoption{A}{\tabl_tabulate_set_alignment}
% We no longer deal with '~' here but map it onto 'd' instead. Of course
% we could prefix a key with \type {\meaning} instead, which works ok (and
@@ -619,10 +621,23 @@
{\setvalue{\??tabulatehook\the\c_tabl_tabulate_columns}{#1}%
\tabl_tabulate_set_preamble}
+% begin of character align plugin
+
+\newconditional\c_tabl_auto_align_mode % reset later
+
+\def\tabl_tabulate_hook_g % partly expanded
+ {\ifconditional\c_tabl_auto_align_mode
+ \signalcharacteralign\c_tabl_tabulate_column{\c_tabl_tabulate_noflines+\plusone}%
+ \typo_charalign_adapt_font
+ \fi}
+
\def\tabl_tabulate_set_align#1%
- {\setvalue{\??tabulatealign\the\c_tabl_tabulate_columns}{#1}%
+ {\global\settrue\c_tabl_auto_align_mode
+ \setcharacteralign\c_tabl_tabulate_columns{#1}%
\tabl_tabulate_set_preamble}
+% end of character align plugin
+
\def\tabl_tabulate_set_before#1%
{\t_tabl_tabulate_before{#1}%
\tabl_tabulate_set_preamble}
@@ -643,6 +658,11 @@
\c_tabl_tabulate_modus\zerocount
\tabl_tabulate_pickup_width}
+\def\tabl_tabulate_set_alignment#1%
+ {\edef\m_tabl_tabulate_alignment{#1}%
+ \spac_align_use_later\m_tabl_tabulate_alignment
+ \tabl_tabulate_set_preamble}
+
\def\tabl_tabulate_set_paragraph
{\doifnextparenthesiselse
{\c_tabl_tabulate_modus\plusone
@@ -718,6 +738,7 @@
\installcorenamespace{tabulatecolorspec}
+\setvalue{\??tabulatecolorspec C}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\zerocount}
\setvalue{\??tabulatecolorspec L}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plusone }
\setvalue{\??tabulatecolorspec M}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plustwo }
\setvalue{\??tabulatecolorspec R}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plusthree}
@@ -743,10 +764,12 @@
\t_tabl_tabulate_emath\emptytoks
\t_tabl_tabulate_font\emptytoks
\t_tabl_tabulate_settings\emptytoks
+ \global\let\m_tabl_tabulate_alignment\empty
\global\let\m_tabl_tabulate_color\empty
\global\let\m_tabl_tabulate_text_color\empty
\global\let\m_tabl_tabulate_vrule_color\empty
\global\c_tabl_tabulate_colorspan\zerocount
+ \global\setfalse\c_tabl_auto_align_mode
\global\advance\c_tabl_tabulate_columns\plusone
\expandafter\let\csname\??tabulatesetup\the\c_tabl_tabulate_columns\endcsname\donothing % here ?
\edef\currenttabulationtrulespec{#1}%
@@ -756,7 +779,7 @@
\global\d_tabl_tabulate_vrulethickness\d_tabl_tabulate_vrulethickness_default
\rawprocesscommalist[#1]\tabl_tabulate_set_vrule_command
\fi
- \tabl_tabulate_set_preamble#2\relax\relax % permits i without n
+ \tabl_tabulate_set_preamble#2\relax\relax % permits i without n
\ifcase\c_tabl_tabulate_modus\relax
\tabl_tabulate_set_width_normal
\or % fixed width
@@ -814,6 +837,13 @@
\global\d_tabl_tabulate_splitoff_betweenskip\lastskip
\fi}}
+\installtexdirective
+ {tabulate.linenumbers}
+ {\def\tabl_tabulate_check_linenumbers{\page_postprocessors_linenumbers_deepbox\b_tabl_tabulate}}
+ {\let\tabl_tabulate_check_linenumbers\relax}
+
+\let\tabl_tabulate_check_linenumbers\relax
+
\def\tabl_tabulate_splitoff_box
{\dontcomplain
\global\setbox\b_tabl_tabulate\vsplit\b_tabl_tabulate_current\c_tabl_tabulate_column to \lineheight % % % global ? % % %
@@ -826,6 +856,7 @@
\setbox\b_tabl_tabulate\hbox to \wd\b_tabl_tabulate
{\hss\tabl_tabulate_hook_yes{\box\b_tabl_tabulate}\hss}%
\tabl_tabulate_normalize_splitline
+ \tabl_tabulate_check_linenumbers
\box\b_tabl_tabulate}
\unexpanded\def\tabl_tabulate_hook_nop
@@ -833,8 +864,7 @@
\let\tabl_tabulate_hook\tabl_tabulate_hook_nop
-\def\tabl_tabulate_hook_yes {\csname\??tabulatehook \the\c_tabl_tabulate_column\endcsname}
-\def\tabl_tabulate_align_yes{\csname\??tabulatealign\the\c_tabl_tabulate_column\endcsname} % to be used
+\def\tabl_tabulate_hook_yes{\csname\??tabulatehook\the\c_tabl_tabulate_column\endcsname}
\def\tabl_tabulate_pheight_reset
{\global\c_tabl_tabulate_plines_min\plusone
@@ -1046,8 +1076,8 @@
\tabulatenoalign{\kern-\lineheight}%
\fi}
-\setuvalue{\e!start\v!tabulatehead}{\doifnextoptionalelse\tabl_tabulate_start_head_yes\tabl_tabulate_start_head_nop}
-\setuvalue{\e!start\v!tabulatetail}{\doifnextoptionalelse\tabl_tabulate_start_foot_yes\tabl_tabulate_start_foot_nop}
+\setuvalue{\e!start\v!tabulatehead}{\doifnextoptionalcselse\tabl_tabulate_start_head_yes\tabl_tabulate_start_head_nop}
+\setuvalue{\e!start\v!tabulatetail}{\doifnextoptionalcselse\tabl_tabulate_start_foot_yes\tabl_tabulate_start_foot_nop}
\let\m_tabl_tabulate_data\empty
@@ -1067,7 +1097,7 @@
% {\bgroup
% \edef\currenttabulationparent{#1}%
% \let\currenttabulation\currenttabulationparent
-% \doifnextoptionalelse\tabl_start_defined_yes\tabl_start_defined_nop}
+% \doifnextoptionalcselse\tabl_start_defined_yes\tabl_start_defined_nop}
%
% \def\tabl_start_defined_yes[#1]%
% {\edef\currenttabulation{\currenttabulation:#1}%
@@ -1684,7 +1714,8 @@
\unexpanded\def\tabl_tabulate_RQ_first{\tabl_tabulate_column_equal \plusone}
\unexpanded\def\tabl_tabulate_HQ_first{\tabl_tabulate_column_equal \plustwo}
-\unexpanded\def\tabl_tabulate_NG_first{\NC\tabl_tabulate_charalign}
+%unexpanded\def\tabl_tabulate_NG_first{\NC\tabl_tabulate_charalign}
+\unexpanded\def\tabl_tabulate_NG_first{\NC}
\unexpanded\def\tabl_tabulate_NN_first{\NC\tabl_tabulate_digits} % new, undocumented, test first
\unexpanded\def\tabl_tabulate_ND_first{\NC\tabl_tabulate_digits} % same, for old times sake
@@ -1911,7 +1942,7 @@
\tabl_tabulate_nobreak_inject
\stoptabulatenoalign}
-\let\tabl_tabulate_BL_second\tabl_tabulate_TL_second
+\let\tabl_tabulate_BL_second_indeed\tabl_tabulate_TL_second_indeed
\def\tabl_tabulate_HL_second
{\csname
@@ -2004,6 +2035,8 @@
\let\tabl_tabulate_flush_collected \empty
\let\tabl_tabulate_flush_collected_indeed\empty
+\let\v_tabl_tabulate_align\!!zerocount
+
\def\tabl_tabulate_set_local_hsize
{\setlocalhsize
\hsize\localhsize}
diff --git a/Master/texmf-dist/tex/context/base/tabl-xtb.lua b/Master/texmf-dist/tex/context/base/tabl-xtb.lua
index 3ffe8a2196a..d9daefe69be 100644
--- a/Master/texmf-dist/tex/context/base/tabl-xtb.lua
+++ b/Master/texmf-dist/tex/context/base/tabl-xtb.lua
@@ -25,19 +25,22 @@ this mechanism will be improved so that it can replace its older cousin.
-- todo: use linked list instead of r/c array
-local commands, context, tex, node = commands, context, tex, node
+local tonumber = tonumber
-local texdimen = tex.dimen
-local texcount = tex.count
-local texbox = tex.box
-local texsetcount = tex.setcount
-local texsetdimen = tex.setdimen
+local commands = commands
+local context = context
+local tex = tex
-local format = string.format
-local concat = table.concat
-local points = number.points
+local texgetcount = tex.getcount
+local texsetcount = tex.setcount
+local texgetdimen = tex.getdimen
+local texsetdimen = tex.setdimen
+local texget = tex.get
+
+local format = string.format
+local concat = table.concat
+local points = number.points
-local context = context
local context_beginvbox = context.beginvbox
local context_endvbox = context.endvbox
local context_blank = context.blank
@@ -48,13 +51,23 @@ local variables = interfaces.variables
local setmetatableindex = table.setmetatableindex
local settings_to_hash = utilities.parsers.settings_to_hash
-local copy_node_list = node.copy_list
-local hpack_node_list = node.hpack
-local vpack_node_list = node.vpack
-local slide_node_list = node.slide
-local flush_node_list = node.flush_list
+local nuts = nodes.nuts -- here nuts gain hardly anything
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local getbox = nuts.getbox
-local nodepool = nodes.pool
+local setfield = nuts.setfield
+
+local copy_node_list = nuts.copy_list
+local hpack_node_list = nuts.hpack
+local flush_node_list = nuts.flush_list
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
@@ -171,11 +184,11 @@ function xtables.create(settings)
settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0
settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0
settings.options = settings_to_hash(settings.option)
- settings.textwidth = tonumber(settings.textwidth) or tex.hsize
- settings.lineheight = tonumber(settings.lineheight) or texdimen.lineheight
+ settings.textwidth = tonumber(settings.textwidth) or texget("hsize")
+ settings.lineheight = tonumber(settings.lineheight) or texgetdimen("lineheight")
settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8
-- if #stack > 0 then
- -- settings.textwidth = tex.hsize
+ -- settings.textwidth = texget("hsize")
-- end
data.criterium_v = 2 * data.settings.lineheight
data.criterium_h = .75 * data.settings.textwidth
@@ -186,10 +199,10 @@ function xtables.initialize_reflow_width(option)
local r = data.currentrow
local c = data.currentcolumn + 1
local drc = data.rows[r][c]
- drc.nx = texcount.c_tabl_x_nx
- drc.ny = texcount.c_tabl_x_ny
+ drc.nx = texgetcount("c_tabl_x_nx")
+ drc.ny = texgetcount("c_tabl_x_ny")
local distances = data.distances
- local distance = texdimen.d_tabl_x_distance
+ local distance = texgetdimen("d_tabl_x_distance")
if distance > distances[c] then
distances[c] = distance
end
@@ -214,25 +227,25 @@ function xtables.set_reflow_width()
while row[c].span do -- can also be previous row ones
c = c + 1
end
- local tb = texbox.b_tabl_x
+ local tb = getbox("b_tabl_x")
local drc = row[c]
--
drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb))
--
- local widths, width = data.widths, tb.width
+ local widths, width = data.widths, getfield(tb,"width")
if width > widths[c] then
widths[c] = width
end
- local heights, height = data.heights, tb.height
+ local heights, height = data.heights, getfield(tb,"height")
if height > heights[r] then
heights[r] = height
end
- local depths, depth = data.depths, tb.depth
+ local depths, depth = data.depths, getfield(tb,"depth")
if depth > depths[r] then
depths[r] = depth
end
--
- local dimensionstate = texcount.frameddimensionstate
+ local dimensionstate = texgetcount("frameddimensionstate")
local fixedcolumns = data.fixedcolumns
local fixedrows = data.fixedrows
if dimensionstate == 1 then
@@ -294,19 +307,19 @@ function xtables.initialize_reflow_height()
for x=1,drc.nx-1 do
w = w + widths[c+x]
end
- texdimen.d_tabl_x_width = w
+ texsetdimen("d_tabl_x_width",w)
local dimensionstate = drc.dimensionstate or 0
if dimensionstate == 1 or dimensionstate == 3 then
-- width was fixed so height is known
- texcount.c_tabl_x_skip_mode = 1
+ texsetcount("c_tabl_x_skip_mode",1)
elseif dimensionstate == 2 then
-- height is enforced
- texcount.c_tabl_x_skip_mode = 1
+ texsetcount("c_tabl_x_skip_mode",1)
elseif data.autowidths[c] then
-- width has changed so we need to recalculate the height
- texcount.c_tabl_x_skip_mode = 0
+ texsetcount("c_tabl_x_skip_mode",0)
else
- texcount.c_tabl_x_skip_mode = 1
+ texsetcount("c_tabl_x_skip_mode",1)
end
end
@@ -315,23 +328,23 @@ function xtables.set_reflow_height()
local c = data.currentcolumn
local rows = data.rows
local row = rows[r]
--- while row[c].span do -- we could adapt drc.nx instead
--- c = c + 1
--- end
- local tb = texbox.b_tabl_x
+ -- while row[c].span do -- we could adapt drc.nx instead
+ -- c = c + 1
+ -- end
+ local tb = getbox("b_tabl_x")
local drc = row[c]
if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
- local heights, height = data.heights, tb.height
+ local heights, height = data.heights, getfield(tb,"height")
if height > heights[r] then
heights[r] = height
end
- local depths, depth = data.depths, tb.depth
+ local depths, depth = data.depths, getfield(tb,"depth")
if depth > depths[r] then
depths[r] = depth
end
end
--- c = c + drc.nx - 1
--- data.currentcolumn = c
+ -- c = c + drc.nx - 1
+ -- data.currentcolumn = c
end
function xtables.initialize_construct()
@@ -357,9 +370,9 @@ function xtables.initialize_construct()
h = h + heights[r+y]
d = d + depths[r+y]
end
- texdimen.d_tabl_x_width = w
- texdimen.d_tabl_x_height = h + d
- texdimen.d_tabl_x_depth = 0
+ texsetdimen("d_tabl_x_width",w)
+ texsetdimen("d_tabl_x_height",h + d)
+ texsetdimen("d_tabl_x_depth",0)
end
function xtables.set_construct()
@@ -367,14 +380,14 @@ function xtables.set_construct()
local c = data.currentcolumn
local rows = data.rows
local row = rows[r]
--- while row[c].span do -- can also be previous row ones
--- c = c + 1
--- end
+ -- while row[c].span do -- can also be previous row ones
+ -- c = c + 1
+ -- end
local drc = row[c]
-- this will change as soon as in luatex we can reset a box list without freeing
- drc.list = copy_node_list(texbox.b_tabl_x)
--- c = c + drc.nx - 1
--- data.currentcolumn = c
+ drc.list = copy_node_list(getbox("b_tabl_x"))
+ -- c = c + drc.nx - 1
+ -- data.currentcolumn = c
end
local function showwidths(where,widths,autowidths)
@@ -559,8 +572,8 @@ function xtables.reflow_height()
local total = totalheight + totaldepth
local leftover = settings.textheight - total
if leftover > 0 then
- local leftheight = (totalheight / total ) * leftover / #heights
- local leftdepth = (totaldepth / total ) * leftover / #depths
+ local leftheight = (totalheight / total) * leftover / #heights
+ local leftdepth = (totaldepth / total) * leftover / #depths
for i=1,nofrows do
heights[i] = heights[i] + leftheight
depths [i] = depths [i] + leftdepth
@@ -645,23 +658,23 @@ function xtables.construct()
end
local list = drc.list
if list then
- list.shift = list.height + list.depth
+ setfield(list,"shift",getfield(list,"height") + getfield(list,"depth"))
-- list = hpack_node_list(list) -- is somehow needed
- -- list.width = 0
- -- list.height = 0
- -- list.depth = 0
+ -- setfield(list,"width",0)
+ -- setfield(list,"height",0)
+ -- setfield(list,"depth",0)
-- faster:
local h = new_hlist()
- h.list = list
+ setfield(h,"list",list)
list = h
--
if start then
- stop.next = list
- list.prev = stop
+ setfield(stop,"next",list)
+ setfield(list,"prev",stop)
else
start = list
end
- stop = list -- one node anyway, so not needed: slide_node_list(list)
+ stop = list
end
local step = widths[c]
if c < nofcolumns then
@@ -669,8 +682,8 @@ function xtables.construct()
end
local kern = new_kern(step)
if stop then
- stop.prev = kern
- stop.next = kern
+ setfield(stop,"next",kern)
+ setfield(kern,"prev",stop)
else -- can be first spanning next row (ny=...)
start = kern
end
@@ -679,8 +692,8 @@ function xtables.construct()
if start then
if rightmargindistance > 0 then
local kern = new_kern(rightmargindistance)
- stop.next = kern
- kern.prev = stop
+ setfield(stop,"next",kern)
+ setfield(kern,"prev",stop)
-- stop = kern
end
return start, heights[r] + depths[r], hasspan
@@ -720,10 +733,12 @@ function xtables.construct()
texsetdimen("global","d_tabl_x_final_width",0)
else
texsetcount("global","c_tabl_x_state",1)
- texsetdimen("global","d_tabl_x_final_width",body[1][1].width)
+ texsetdimen("global","d_tabl_x_final_width",getfield(body[1][1],"width"))
end
end
+-- todo: join as that is as efficient as flushing multiple
+
local function inject(row,copy,package)
local list = row[1]
if copy then
@@ -731,8 +746,8 @@ local function inject(row,copy,package)
end
if package then
context_beginvbox()
- context(list)
- context(new_kern(row[2]))
+ context(tonode(list))
+ context(tonode(new_kern(row[2])))
context_endvbox()
context_nointerlineskip() -- figure out a better way
if row[4] then
@@ -740,13 +755,13 @@ local function inject(row,copy,package)
elseif row[3] then
context_blank(row[3] .. "sp") -- why blank ?
else
- context(new_glue(0))
+ context(tonode(new_glue(0)))
end
else
- context(list)
- context(new_kern(row[2]))
+ context(tonode(list))
+ context(tonode(new_kern(row[2])))
if row[3] then
- context(new_glue(row[3]))
+ context(tonode(new_glue(row[3])))
end
end
end
@@ -819,7 +834,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],repeatheader)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
if not repeatheader then
results[head_mode] = { }
@@ -832,7 +847,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(more[i],true)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
end
elseif headsize > 0 and repeatheader then -- following chunk gets head
@@ -842,7 +857,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],true)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
end
else -- following chunk gets nothing
@@ -869,7 +884,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- all is flushed and footer fits
if footsize > 0 then
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i])
@@ -883,7 +898,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- todo: try to flush a few more lines
if repeatfooter and footsize > 0 then
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i],true)
@@ -935,13 +950,13 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i])
end
if #head > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#body do
inject(body[i])
end
if #foot > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i])
@@ -961,12 +976,30 @@ function xtables.cleanup()
flush_node_list(r[1])
end
end
+
+ -- local rows = data.rows
+ -- for i=1,#rows do
+ -- local row = rows[i]
+ -- for i=1,#row do
+ -- local cell = row[i]
+ -- local list = cell.list
+ -- if list then
+ -- cell.width = getfield(list,"width")
+ -- cell.height = getfield(list,"height")
+ -- cell.depth = getfield(list,"depth")
+ -- cell.list = true
+ -- end
+ -- end
+ -- end
+ -- data.result = nil
+ -- inspect(data)
+
data = table.remove(stack)
end
function xtables.next_row()
local r = data.currentrow + 1
- data.modes[r] = texcount.c_tabl_x_mode
+ data.modes[r] = texgetcount("c_tabl_x_mode")
data.currentrow = r
data.currentcolumn = 0
end
@@ -986,3 +1019,6 @@ commands.x_table_init_construct = xtables.initialize_construct
commands.x_table_set_reflow_width = xtables.set_reflow_width
commands.x_table_set_reflow_height = xtables.set_reflow_height
commands.x_table_set_construct = xtables.set_construct
+
+commands.x_table_r = function() context(data.currentrow or 0) end
+commands.x_table_c = function() context(data.currentcolumn or 0) end
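
Most of the changes in this file are the move from userdata nodes to the direct node ("nuts")
accessors. A minimal sketch of the idiom, using only names that appear in the patch above (the
box register name is the one used here):

    local nuts     = nodes.nuts
    local tonode   = nuts.tonode
    local getbox   = nuts.getbox
    local getfield = nuts.getfield
    local setfield = nuts.setfield

    local b = getbox("b_tabl_x")       -- a direct node, not a userdata node
    local w = getfield(b,"width")      -- was: texbox.b_tabl_x.width
    setfield(b,"shift",0)              -- was: b.shift = 0
    -- lists built this way are converted back with tonode(...) before they are
    -- handed to context(...), as in the inject helper above
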
diff --git a/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi b/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi
index aba4e502718..cca56dbee53 100644
--- a/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi
+++ b/Master/texmf-dist/tex/context/base/tabl-xtb.mkvi
@@ -94,9 +94,11 @@
\newdimen\d_tabl_x_final_width
\newcount\c_tabl_x_nesting
\newcount\c_tabl_x_skip_mode % 1 = skip
-
\newdimen\d_tabl_x_textwidth
+\def\currentxtablerow {\ctxcommand{x_table_r()}}
+\def\currentxtablecolumn{\ctxcommand{x_table_c()}}
+
% \setupxtable[one][parent][a=b,c=d]
% \setupxtable[one] [a=b,c=d]
% \setupxtable [a=b,c=d]
@@ -151,6 +153,7 @@
\let\stopxtable\relax
\def\tabl_x_default_buffer{x_table_\number\c_tabl_x_nesting}
+\let\tabl_x_current_buffer\empty
\unexpanded\def\tabl_x_start_table[#settings]% maybe two arguments: [tag][settings] | [tag] | [settings]
{\bgroup
@@ -399,7 +402,7 @@
\unexpanded\def\startxrow
{\begingroup
- \doifnextoptionalelse\tabl_x_start_row_yes\tabl_x_start_row_nop}
+ \doifnextoptionalcselse\tabl_x_start_row_yes\tabl_x_start_row_nop}
\unexpanded\def\tabl_x_start_row_reflow_width_yes[#settings]%
{\setupcurrentxtable[#settings]%
@@ -432,7 +435,7 @@
\endgroup}
\unexpanded\def\startxcell
- {\doifnextoptionalelse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
+ {\doifnextoptionalcselse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
\unexpanded\def\stopxcell
{\tabl_x_stop_cell}
@@ -674,7 +677,7 @@
\unexpanded\def\startxgroup
{\begingroup
- \doifnextoptionalelse\tabl_x_start_group_delayed_one\relax}
+ \doifnextoptionalcselse\tabl_x_start_group_delayed_one\relax}
\unexpanded\def\stopxgroup
{\endgroup}
@@ -692,7 +695,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\setupcurrentxtable\relax}
+ \doifnextoptionalcselse\setupcurrentxtable\relax}
\let\startxrowgroup \startxgroup
\let\stopxrowgroup \stopxgroup
@@ -703,7 +706,7 @@
\unexpanded\def\startxcell
{\begingroup
- \doifnextoptionalelse\tabl_x_start_cell_delayed_one\tabl_x_start_cell_nop}
+ \doifnextoptionalcselse\tabl_x_start_cell_delayed_one\tabl_x_start_cell_nop}
\unexpanded\def\tabl_x_start_cell_delayed_one[#tag]%
% {\ifcsname\namedxtablehash{#tag}\s!parent\endcsname
@@ -718,7 +721,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
+ \doifnextoptionalcselse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
\unexpanded\def\stopxcell
{\tabl_x_stop_cell
@@ -728,7 +731,7 @@
\unexpanded\def\startxrow
{\begingroup
- \doifnextoptionalelse\tabl_x_start_row_delayed_one\tabl_x_start_row_nop}
+ \doifnextoptionalcselse\tabl_x_start_row_delayed_one\tabl_x_start_row_nop}
\unexpanded\def\tabl_x_start_row_delayed_one[#tag]%
% {\ifcsname\namedxtablehash{#tag}\s!parent\endcsname
@@ -743,7 +746,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\tabl_x_start_row_yes\tabl_x_start_row_nop}
+ \doifnextoptionalcselse\tabl_x_start_row_yes\tabl_x_start_row_nop}
\unexpanded\def\stopxrow
{\tabl_x_stop_row
diff --git a/Master/texmf-dist/tex/context/base/task-ini.lua b/Master/texmf-dist/tex/context/base/task-ini.lua
index 0f477cb6eb3..75ce08232bc 100644
--- a/Master/texmf-dist/tex/context/base/task-ini.lua
+++ b/Master/texmf-dist/tex/context/base/task-ini.lua
@@ -12,13 +12,19 @@ if not modules then modules = { } end modules ['task-ini'] = {
-- we can disable more handlers and enable then when really used (*)
--
-- todo: two finalizers: real shipout (can be imposed page) and page shipout (individual page)
+--
+-- todo: consider moving the kernel kerning/ligaturing functions in the main font loop because
+-- there we know if they are needed; doesn't save time but; if we overload unh* commands to
+-- not apply the font handler, we can remove all checks for subtypes 255
local tasks = nodes.tasks
+local prependaction = tasks.prependaction
local appendaction = tasks.appendaction
local disableaction = tasks.disableaction
local freezegroup = tasks.freezegroup
local freezecallbacks = callbacks.freeze
+
appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on
appendaction("processors", "normalizers", "fonts.collections.process") -- disabled
appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled
@@ -31,8 +37,12 @@ appendaction("processors", "characters", "typesetters.cases.handler")
appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
+appendaction("processors", "words", "languages.replacements.handler") -- disabled
appendaction("processors", "words", "builders.kernel.hyphenation") -- always on
-appendaction("processors", "words", "languages.words.check") -- disabled
+appendaction("processors", "words", "languages.words.check") -- disabled -- might move up, no disc check needed then
+
+appendaction("processors", "words", "typesetters.initials.handler") -- disabled -- might move up
+appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled -- might move up
appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental
appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
@@ -43,12 +53,14 @@ appendaction("processors", "fonts", "builders.kernel.kerning")
appendaction("processors", "fonts", "nodes.handlers.stripping") -- disabled (might move)
------------("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling)
+appendaction("processors", "lists", "typesetters.characteralign.handler") -- disabled (we need to to this after otf appliance)
appendaction("processors", "lists", "typesetters.spacings.handler") -- disabled
appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled
appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling)
appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
-appendaction("processors", "lists", "typesetters.paragraphs.handler") -- disabled
+------------("processors", "lists", "typesetters.initials.handler") -- disabled
+appendaction("shipouts", "normalizers", "builders.paragraphs.expansion.trace") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
@@ -71,6 +83,8 @@ appendaction("shipouts", "finishers", "attributes.viewerlayers.handler")
--maybe integrate relocate and families
+appendaction("math", "normalizers", "noads.handlers.showtree", nil, "nohead")
+
appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled)
appendaction("math", "normalizers", "noads.handlers.variants", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on
@@ -83,9 +97,11 @@ appendaction("math", "normalizers", "noads.handlers.resize", nil, "noh
appendaction("math", "normalizers", "noads.handlers.check", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.tags", nil, "nohead") -- disabled
appendaction("math", "normalizers", "noads.handlers.italics", nil, "nohead") -- disabled
+appendaction("math", "normalizers", "noads.handlers.classes", nil, "nohead") -- disabled
appendaction("math", "builders", "builders.kernel.mlist_to_hlist") -- always on
------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled
+appendaction("math", "builders", "typesetters.directions.processmath") -- disabled (has to happen pretty late)
-- quite experimental (nodes.handlers.graphicvadjust might go away)
@@ -103,11 +119,18 @@ appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler")
-- experimental too
-appendaction("mvlbuilders","normalizers","typesetters.checkers.handler")
-appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
+appendaction("mvlbuilders", "normalizers", "typesetters.checkers.handler")
+appendaction("vboxbuilders", "normalizers", "typesetters.checkers.handler")
+
+-- rather special (this might get hardcoded):
+
+prependaction("processors", "before", "nodes.properties.attach") -- enabled but optimized for quick abort
+appendaction ("shipouts", "normalizers", "nodes.properties.delayed") -- enabled but optimized for quick abort
-- speedup: only kick in when used
+disableaction("processors", "languages.replacements.handler")
+disableaction("processors", "typesetters.characteralign.handler")
disableaction("processors", "scripts.autofontfeature.handler")
disableaction("processors", "scripts.splitters.handler")
disableaction("processors", "scripts.injectors.handler") -- was enabled
@@ -120,12 +143,14 @@ disableaction("processors", "typesetters.digits.handler")
disableaction("processors", "typesetters.breakpoints.handler")
disableaction("processors", "typesetters.directions.handler")
disableaction("processors", "languages.words.check")
+disableaction("processors", "typesetters.initials.handler")
+disableaction("processors", "typesetters.firstlines.handler")
disableaction("processors", "typesetters.spacings.handler")
disableaction("processors", "typesetters.kerns.handler")
disableaction("processors", "typesetters.italics.handler")
disableaction("processors", "nodes.handlers.stripping")
-disableaction("processors", "typesetters.paragraphs.handler")
+disableaction("shipouts", "builders.paragraphs.expansion.trace")
disableaction("shipouts", "typesetters.alignments.handler")
disableaction("shipouts", "nodes.rules.handler")
disableaction("shipouts", "nodes.shifts.handler")
@@ -156,8 +181,11 @@ disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete
disableaction("finalizers", "builders.paragraphs.tag")
+disableaction("math", "noads.handlers.showtree")
disableaction("math", "noads.handlers.tags")
disableaction("math", "noads.handlers.italics")
+disableaction("math", "noads.handlers.classes")
+disableaction("math", "typesetters.directions.processmath")
disableaction("mvlbuilders", "typesetters.checkers.handler")
disableaction("vboxbuilders","typesetters.checkers.handler")
diff --git a/Master/texmf-dist/tex/context/base/toks-ini.lua b/Master/texmf-dist/tex/context/base/toks-ini.lua
index ef4b5406b66..0f0c016f8e9 100644
--- a/Master/texmf-dist/tex/context/base/toks-ini.lua
+++ b/Master/texmf-dist/tex/context/base/toks-ini.lua
@@ -5,6 +5,7 @@ if not modules then modules = { } end modules ['toks-ini'] = {
license = "see context related readme files"
}
+local context, commands = context, commands
local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
local format, gsub = string.format, string.gsub
diff --git a/Master/texmf-dist/tex/context/base/trac-deb.lua b/Master/texmf-dist/tex/context/base/trac-deb.lua
index fe167c343e2..af4f7c643a0 100644
--- a/Master/texmf-dist/tex/context/base/trac-deb.lua
+++ b/Master/texmf-dist/tex/context/base/trac-deb.lua
@@ -9,22 +9,28 @@ if not modules then modules = { } end modules ['trac-deb'] = {
local lpeg, status = lpeg, status
local lpegmatch = lpeg.match
-local format, concat, match = string.format, table.concat, string.match
+local format, concat, match, find = string.format, table.concat, string.match, string.find
local tonumber, tostring = tonumber, tostring
-local texdimen, textoks, texcount = tex.dimen, tex.toks, tex.count
-- maybe tracers -> tracers.tex (and tracers.lua for current debugger)
-local report_system = logs.reporter("system","tex")
+----- report_tex = logs.reporter("tex error")
+----- report_lua = logs.reporter("lua error")
+local report_nl = logs.newline
+local report_str = logs.writer
-tracers = tracers or { }
-local tracers = tracers
+tracers = tracers or { }
+local tracers = tracers
-tracers.lists = { }
-local lists = tracers.lists
+tracers.lists = { }
+local lists = tracers.lists
-tracers.strings = { }
-local strings = tracers.strings
+tracers.strings = { }
+local strings = tracers.strings
+
+local texgetdimen = tex.getdimen
+local texgettoks = tex.gettoks
+local texgetcount = tex.getcount
strings.undefined = "undefined"
@@ -71,16 +77,16 @@ function tracers.cs(csname)
end
function tracers.dimen(name)
- local d = texdimen[name]
+ local d = texgetdimen(name)
return d and number.topoints(d) or strings.undefined
end
function tracers.count(name)
- return texcount[name] or strings.undefined
+ return texgetcount(name) or strings.undefined
end
function tracers.toks(name,limit)
- local t = textoks[name]
+ local t = texgettoks(name)
return t and string.limit(t,tonumber(limit) or 40) or strings.undefined
end
@@ -93,7 +99,19 @@ function tracers.knownlist(name)
return l and #l > 0
end
-function tracers.showlines(filename,linenumber,offset,errorstr)
+local savedluaerror = nil
+
+local function errorreporter(luaerror)
+ if luaerror then
+ logs.enable("lua error") --
+ return logs.reporter("lua error")
+ else
+ logs.enable("tex error")
+ return logs.reporter("tex error")
+ end
+end
+
+function tracers.showlines(filename,linenumber,offset,luaerrorline)
local data = io.loaddata(filename)
if not data or data == "" then
local hash = url.hashed(filename)
@@ -106,35 +124,18 @@ function tracers.showlines(filename,linenumber,offset,errorstr)
end
local lines = data and string.splitlines(data)
if lines and #lines > 0 then
- -- This does not work completely as we cannot access the last Lua error using
- -- table.print(status.list()). This is on the agenda. Eventually we will
- -- have a sequence of checks here (tex, lua, mp) at this end.
- --
- -- Actually, in 0.75+ the lua error message is even weirder as you can
- -- get:
- --
- -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ...
- --
- -- \endgroup \directlua {
- --
- -- So there is some work to be done in the LuaTeX engine.
- --
- local what, where = match(errorstr,[[LuaTeX error :(%d+)]])
- or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet
- if where then
+ if luaerrorline and luaerrorline > 0 then
-- lua error: linenumber points to last line
local start = "\\startluacode"
local stop = "\\stopluacode"
- local where = tonumber(where)
- if lines[linenumber] == start then
- local n = linenumber
- for i=n,1,-1 do
- if lines[i] == start then
- local n = i + where
- if n <= linenumber then
- linenumber = n
- end
+ local n = linenumber
+ for i=n,1,-1 do
+ if find(lines[i],start) then
+ n = i + luaerrorline - 1
+ if n <= linenumber then
+ linenumber = n
end
+ break
end
end
end
@@ -156,30 +157,84 @@ function tracers.showlines(filename,linenumber,offset,errorstr)
end
end
-function tracers.printerror(offset)
- local inputstack = resolvers.inputstack
- local filename = inputstack[#inputstack] or status.filename
- local linenumber = tonumber(status.linenumber) or 0
+-- this will work ok in >=0.79
+
+-- todo: last tex error has ! prepended
+-- todo: some nested errors have two line numbers
+-- todo: collect errorcontext in string (after code cleanup)
+-- todo: have a separate status.lualinenumber
+
+-- todo: \starttext bla \blank[foo] bla \stoptext
+
+local function processerror(offset)
+ local inputstack = resolvers.inputstack
+ local filename = inputstack[#inputstack] or status.filename
+ local linenumber = tonumber(status.linenumber) or 0
+ --
+ -- print("[[ last tex error: " .. tostring(status.lasterrorstring) .. " ]]")
+ -- print("[[ last lua error: " .. tostring(status.lastluaerrorstring) .. " ]]")
+ -- print("[[ start errorcontext ]]")
+ -- tex.show_context()
+ -- print("\n[[ stop errorcontext ]]")
+ --
+ local lasttexerror = status.lasterrorstring or "?"
+ local lastluaerror = status.lastluaerrorstring or lasttexerror
+ local luaerrorline = match(lastluaerror,[[lua%]?:.-(%d+)]]) or (lastluaerror and find(lastluaerror,"?:0:",1,true) and 0)
+ local report = errorreporter(luaerrorline)
+ tracers.printerror {
+ filename = filename,
+ linenumber = linenumber,
+ lasttexerror = lasttexerror,
+ lastluaerror = lastluaerror,
+ luaerrorline = luaerrorline,
+ offset = tonumber(offset) or 10,
+ }
+end
+
+-- so one can overload the printer if (really) needed
+
+function tracers.printerror(specification)
+ local filename = specification.filename
+ local linenumber = specification.linenumber
+ local lasttexerror = specification.lasttexerror
+ local lastluaerror = specification.lastluaerror
+ local luaerrorline = specification.luaerrorline
+ local offset = specification.offset
+ local report = errorreporter(luaerrorline)
if not filename then
- report_system("error not related to input file: %s ...",status.lasterrorstring)
+ report("error not related to input file: %s ...",lasttexerror)
elseif type(filename) == "number" then
- report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring)
+ report("error on line %s of filehandle %s: %s ...",linenumber,lasttexerror)
else
- -- currently we still get the error message printed to the log/console so we
- -- add a bit of spacing around our variant
- texio.write_nl("\n")
- local errorstr = status.lasterrorstring or "?"
- -- inspect(status.list())
- report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error?
- texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n")
+ report_nl()
+ if luaerrorline then
+ report("error on line %s in file %s:\n\n%s",linenumber,filename,lastluaerror)
+-- report("error on line %s in file %s:\n\n%s",linenumber,filename,lasttexerror)
+ else
+ report("error on line %s in file %s: %s",linenumber,filename,lasttexerror)
+ if tex.show_context then
+ report_nl()
+ tex.show_context()
+ end
+ end
+ report_nl()
+ report_str(tracers.showlines(filename,linenumber,offset,tonumber(luaerrorline)))
+ report_nl()
end
end
+local nop = function() end
+
directives.register("system.errorcontext", function(v)
+ local register = callback.register
if v then
- callback.register('show_error_hook', function() tracers.printerror(v) end)
+ register('show_error_message', nop)
+ register('show_error_hook', function() processerror(v) end)
+ register('show_lua_error_hook', nop)
else
- callback.register('show_error_hook', nil)
+ register('show_error_message', nil)
+ register('show_error_hook', nil)
+ register('show_lua_error_hook', nil)
end
end)
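
The new processerror path no longer parses the formatted error text but uses
status.lastluaerrorstring directly; the small lua pattern only has to pull the line number out of
it. A sketch with a made-up error string (the exact message format differs per luatex version):

    local match = string.match

    local lastluaerror = '[\\directlua]:3: unexpected symbol near "1"'
    local luaerrorline = match(lastluaerror,[[lua%]?:.-(%d+)]]) -- => "3" (a string)
    print(tonumber(luaerrorline))                               -- => 3

When no line number can be found, the "?:0:" fallback above sets luaerrorline to 0, which makes
showlines skip the \startluacode offset correction.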
diff --git a/Master/texmf-dist/tex/context/base/trac-inf.lua b/Master/texmf-dist/tex/context/base/trac-inf.lua
index eefc15a6f7e..034726ffcdd 100644
--- a/Master/texmf-dist/tex/context/base/trac-inf.lua
+++ b/Master/texmf-dist/tex/context/base/trac-inf.lua
@@ -11,20 +11,24 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local type, tonumber = type, tonumber
-local format, lower = string.format, string.lower
+local type, tonumber, select = type, tonumber, select
+local format, lower, find = string.format, string.lower, string.find
local concat = table.concat
local clock = os.gettimeofday or os.clock -- should go in environment
-statistics = statistics or { }
-local statistics = statistics
+local setmetatableindex = table.setmetatableindex
+local serialize = table.serialize
+local formatters = string.formatters
-statistics.enable = true
-statistics.threshold = 0.01
+statistics = statistics or { }
+local statistics = statistics
+
+statistics.enable = true
+statistics.threshold = 0.01
local statusinfo, n, registered, timers = { }, 0, { }, { }
-table.setmetatableindex(timers,function(t,k)
+setmetatableindex(timers,function(t,k)
local v = { timing = 0, loadtime = 0 }
t[k] = v
return v
@@ -118,6 +122,10 @@ function statistics.show()
if statistics.enable then
-- this code will move
local register = statistics.register
+ register("used platform", function()
+ return format("%s, type: %s, binary subtree: %s",
+ os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
+ end)
register("luatex banner", function()
return lower(status.banner)
end)
@@ -129,16 +137,25 @@ function statistics.show()
return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
end)
if jit then
- local status = { jit.status() }
- if status[1] then
- register("luajit status", function()
- return concat(status," ",2)
- end)
+ local jitstatus = { jit.status() }
+ if jitstatus[1] then
+ register("luajit options", concat(jitstatus," ",2))
end
end
-- so far
-- collectgarbage("collect")
- register("current memory usage",statistics.memused)
+ register("lua properties",function()
+ local list = status.list()
+ local hashchar = tonumber(list.luatex_hashchars)
+ local mask = lua.mask or "ascii"
+ return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ jit and "luajit" or "lua",
+ statistics.memused(),
+ list.luatex_hashtype or "default",
+ hashchar and 2^hashchar or "unknown",
+ mask,
+ mask == "utf" and "τεχ" or "tex")
+ end)
register("runtime",statistics.runtime)
logs.newline() -- initial newline
for i=1,#statusinfo do
@@ -175,7 +192,20 @@ function statistics.timed(action)
starttiming("run")
action()
stoptiming("run")
- report("total runtime: %s",elapsedtime("run"))
+ report("total runtime: %s seconds",elapsedtime("run"))
+end
+
+-- goodie
+
+function statistics.tracefunction(base,tag,...)
+ for i=1,select("#",...) do
+ local name = select(i,...)
+ local stat = { }
+ local func = base[name]
+ setmetatableindex(stat,function(t,k) t[k] = 0 return 0 end)
+ base[name] = function(n,k,v) stat[k] = stat[k] + 1 return func(n,k,v) end
+ statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
+ end
end
-- where, not really the best spot for this:
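
The tracefunction goodie wraps the given methods of a table and counts, per key, how often they
are called; the counts show up in the statistics reported at the end of the run. A hedged usage
sketch (tracing the nuts accessors is just an example, any (object,key,...) style function works):

    -- count how often each field name is read or written through the public
    -- accessors; locals captured before this call are not affected
    statistics.tracefunction(nodes.nuts,"nuts","getfield","setfield")
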
diff --git a/Master/texmf-dist/tex/context/base/trac-jus.lua b/Master/texmf-dist/tex/context/base/trac-jus.lua
index 9d99f059d36..00c871159ad 100644
--- a/Master/texmf-dist/tex/context/base/trac-jus.lua
+++ b/Master/texmf-dist/tex/context/base/trac-jus.lua
@@ -14,20 +14,33 @@ typesetters.checkers = checkers
local a_alignstate = attributes.private("alignstate")
local a_justification = attributes.private("justification")
-local tracers = nodes.tracers
-local setcolor = tracers.colors.set
-local settransparency = tracers.transparencies.set
-
-local new_rule = nodes.pool.rule
-local new_glue = nodes.pool.glue
-local new_kern = nodes.pool.kern
-local concat_nodes = nodes.concat
-local hpack_nodes = node.hpack
-local copy_node = node.copy
-local get_list_dimensions = node.dimensions
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local setlist = nuts.setlist
+
+local traverse_id = nuts.traverse_id
+local get_list_dimensions = nuts.dimensions
+local linked_nodes = nuts.linked
+local copy_node = nuts.copy
+
+local tracedrule = nodes.tracers.pool.nuts.rule
+
+local nodepool = nuts.pool
+
+local new_rule = nodepool.rule
+local new_hlist = nodepool.hlist
+local new_glue = nodepool.glue
+local new_kern = nodepool.kern
+
local hlist_code = nodes.nodecodes.hlist
-local tex_set_attribute = tex.setattribute
+local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
local min_threshold = 0
@@ -36,14 +49,14 @@ local max_threshold = 0
local function set(n)
nodes.tasks.enableaction("mvlbuilders", "typesetters.checkers.handler")
nodes.tasks.enableaction("vboxbuilders","typesetters.checkers.handler")
- tex_set_attribute(a_justification,n or 1)
+ texsetattribute(a_justification,n or 1)
function typesetters.checkers.set(n)
- tex_set_attribute(a_justification,n or 1)
+ texsetattribute(a_justification,n or 1)
end
end
local function reset()
- tex_set_attribute(a_justification,unsetvalue)
+ texsetattribute(a_justification,unsetvalue)
end
checkers.set = set
@@ -62,70 +75,35 @@ trackers.register("visualizers.justification", function(v)
end)
function checkers.handler(head)
- for current in node.traverse_id(hlist_code,head) do
- if current[a_justification] == 1 then
- current[a_justification] = 0
- local width = current.width
+ for current in traverse_id(hlist_code,tonut(head)) do
+ if getattr(current,a_justification) == 1 then
+ setattr(current,a_justification,0)
+ local width = getfield(current,"width")
if width > 0 then
- local list = current.list
+ local list = getlist(current)
if list then
local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
local delta = naturalwidth - width
if naturalwidth == 0 or delta == 0 then
-- special box
elseif delta >= max_threshold then
- local rule = new_rule(delta,naturalheight,naturaldepth)
- list = hpack_nodes(list,width,"exactly")
- if list.glue_set == 1 then
- setcolor(rule,"trace:dr")
- settransparency(rule,"trace:dr")
- else
- setcolor(rule,"trace:db")
- settransparency(rule,"trace:db")
- end
- rule = hpack_nodes(rule)
- rule.width = 0
- rule.height = 0
- rule.depth = 0
- current.list = concat_nodes { list, rule }
- -- current.list = concat_nodes { list, new_kern(-naturalwidth+width), rule }
+ local rule = tracedrule(delta,naturalheight,naturaldepth,getfield(list,"glue_set") == 1 and "trace:dr" or "trace:db")
+ setfield(current,"list",linked_nodes(list,new_hlist(rule)))
elseif delta <= min_threshold then
- local alignstate = list[a_alignstate]
+ local alignstate = getattr(list,a_alignstate)
if alignstate == 1 then
- local rule = new_rule(-delta,naturalheight,naturaldepth)
- setcolor(rule,"trace:dc")
- settransparency(rule,"trace:dc")
- rule = hpack_nodes(rule)
- rule.height = 0
- rule.depth = 0
- rule.width = 0
- current.list = nodes.concat { rule, list }
+ local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dc")
+ setfield(current,"list",linked_nodes(new_hlist(rule),list))
elseif alignstate == 2 then
- local rule = new_rule(-delta/2,naturalheight,naturaldepth)
- setcolor(rule,"trace:dy")
- settransparency(rule,"trace:dy")
- rule = hpack_nodes(rule)
- rule.width = 0
- rule.height = 0
- rule.depth = 0
- current.list = concat_nodes { copy_node(rule), list, new_kern(delta/2), rule }
+ local lrule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
+ local rrule = copy_node(lrule)
+ setfield(current,"list",linked_nodes(new_hlist(lrule),list,new_kern(delta/2),new_hlist(rrule)))
elseif alignstate == 3 then
- local rule = new_rule(-delta,naturalheight,naturaldepth)
- setcolor(rule,"trace:dm")
- settransparency(rule,"trace:dm")
- rule = hpack_nodes(rule)
- rule.height = 0
- rule.depth = 0
- current.list = concat_nodes { list, new_kern(delta), rule }
+ local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dm")
+ setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
else
- local rule = new_rule(-delta,naturalheight,naturaldepth)
- setcolor(rule,"trace:dg")
- settransparency(rule,"trace:dg")
- rule = hpack_nodes(rule)
- rule.height = 0
- rule.depth = 0
- rule.width = 0
- current.list = concat_nodes { list, new_kern(delta), rule }
+ local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dg")
+ setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
end
end
end
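
The visualizer itself stays dormant until the tracker registered above is enabled, for instance
from Lua:

    -- overfull lines get a trace:dr/trace:db rule appended, underfull lines get
    -- a rule colored according to their alignstate attribute
    trackers.enable("visualizers.justification")
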
diff --git a/Master/texmf-dist/tex/context/base/trac-lmx.lua b/Master/texmf-dist/tex/context/base/trac-lmx.lua
index 18c7f60204f..41d9305361a 100644
--- a/Master/texmf-dist/tex/context/base/trac-lmx.lua
+++ b/Master/texmf-dist/tex/context/base/trac-lmx.lua
@@ -430,7 +430,7 @@ local optionalspaces = whitespace^0
local dquote = P('"')
-local begincomment = P("<!--")
 local beginembedxml = P("<?")
@@ -505,7 +505,8 @@ local resolvecss = ((beginembedcss * P("lmx-resolve") * optionalspaces) / ""
* ((Carg(1) * C(argumentcss)) / getdefinition)
* gobbledendcss
-local pattern_1 = Cs((commentxml + includexml + includecss + P(1))^0) -- get rid of xml comments asap
+----- pattern_1 = Cs((commentxml + includexml + includecss + P(1))^0) -- get rid of xml comments asap .. not good enough: embedded css and script is in
+
+%lexedcontent%
+
+
+]])
+
+function scite.tohtml(data,lexname,numbered)
+ return htmlfile {
+ lexedcontent = exportstyled(loadedlexers[lexname],data or "",numbered), -- before numberstyles
+ lexingstyles = exportcsslexing(),
+ numberstyles = exportcsslinenumber(),
+ }
+end
+
+function scite.filetohtml(filename,lexname,targetname,numbered)
+ io.savedata(targetname or "util-sci.html",scite.tohtml(io.loaddata(filename),lexname or file.suffix(filename),numbered))
+end
+
+function scite.css()
+ return exportcsslexing() .. "\n" .. exportcsslinenumber()
+end
+
+function scite.html(data,lexname,numbered)
+ return exportstyled(loadedlexers[lexname],data or "",numbered)
+end
+
+local f_tree_entry = formatters['<a href="%s">%s</a>']
+
+local htmlfile = utilities.templates.replacer([[
+
+
+
+ context util-sci web page: text
+
+
+
+
+%dirlist%
+
+
+
+]])
+
+function scite.converttree(sourceroot,targetroot,numbered)
+ if lfs.isdir(sourceroot) then
+ statistics.starttiming()
+ local skipped = { }
+ local noffiles = 0
+ dir.makedirs(targetroot)
+ local function scan(sourceroot,targetroot)
+ local tree = { }
+ for name in lfs.dir(sourceroot) do
+ if name ~= "." and name ~= ".." then
+ local sourcename = file.join(sourceroot,name)
+ local targetname = file.join(targetroot,name)
+ local mode = lfs.attributes(sourcename,'mode')
+ if mode == 'file' then
+ local filetype = file.suffix(sourcename)
+ local basename = file.basename(name)
+ local targetname = file.replacesuffix(targetname,"html")
+ if knownlexers[filetype] then
+ report("converting file %a to %a",sourcename,targetname)
+ scite.filetohtml(sourcename,nil,targetname,numbered)
+ noffiles = noffiles + 1
+ tree[#tree+1] = f_tree_entry(file.basename(targetname),basename)
+ else
+ skipped[filetype] = true
+ report("no lexer for %a",sourcename)
+ end
+ else
+ dir.makedirs(targetname)
+ scan(sourcename,targetname)
+ tree[#tree+1] = f_tree_entry(file.join(name,"files.html"),name)
+ end
+ end
+ end
+ report("saving tree in %a",treename)
+ local htmldata = htmlfile {
+ dirlist = concat(tree,"\n"),
+ styles = "",
+ }
+ io.savedata(file.join(targetroot,"files.html"),htmldata)
+ end
+ scan(sourceroot,targetroot)
+ if next(skipped) then
+ report("skipped filetypes: %a",table.concat(table.sortedkeys(skipped)," "))
+ end
+ statistics.stoptiming()
+ report("conversion time for %s files: %s",noffiles,statistics.elapsedtime())
+ end
+end
+
+-- scite.filetohtml("strc-sec.mkiv",nil,"e:/tmp/util-sci.html",true)
+-- scite.filetohtml("syst-aux.mkiv",nil,"e:/tmp/util-sci.html",true)
+
+-- scite.converttree("t:/texmf/tex/context","e:/tmp/html/context",true)
+
+return scite
diff --git a/Master/texmf-dist/tex/context/base/util-seq.lua b/Master/texmf-dist/tex/context/base/util-seq.lua
index 27f95f0eeed..35e6932855c 100644
--- a/Master/texmf-dist/tex/context/base/util-seq.lua
+++ b/Master/texmf-dist/tex/context/base/util-seq.lua
@@ -296,7 +296,7 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
args = ",one"
elseif nofarguments == 2 then
args = ",one,two"
- elseif nofarguments == 3 then
+ elseif nofarguments == 3 then -- from here on probably slower than ...
args = ",one,two,three"
elseif nofarguments == 4 then
args = ",one,two,three,four"
@@ -321,6 +321,8 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
else
calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args)
end
+-- local s = " print('" .. tostring(group) .. " " .. tostring(action) .. " : ' .. tostring(head)) "
+-- calls[n] = s .. calls[n] .. s
end
end
end
diff --git a/Master/texmf-dist/tex/context/base/util-soc.lua b/Master/texmf-dist/tex/context/base/util-soc.lua
index 30301c51027..3a52ee86de3 100644
--- a/Master/texmf-dist/tex/context/base/util-soc.lua
+++ b/Master/texmf-dist/tex/context/base/util-soc.lua
@@ -25,17 +25,17 @@ function mail.send(specification)
local server = specification.server or ""
 if server == "" then
report_mail("no server specified")
- return false
+ return false, "invalid server"
end
local to = specification.to or specification.recepient or ""
if to == "" then
- report_mail("no recepient specified")
- return false
+ report_mail("no recipient specified")
+ return false, "invalid recipient"
end
local from = specification.from or specification.sender or ""
if from == "" then
report_mail("no sender specified")
- return false
+ return false, "invalid sender"
end
local message = { }
local body = specification.body
@@ -68,11 +68,13 @@ function mail.send(specification)
end
end
end
+ local user = specification.user
+ local password = specification.password
local result, detail = smtp.send {
server = specification.server,
port = specification.port,
- user = specification.user,
- password = specification.password,
+ user = user ~= "" and user or nil,
+ password = password ~= "" and password or nil,
from = from,
rcpt = to,
source = smtp.message {
@@ -87,7 +89,9 @@ function mail.send(specification)
}
if detail then
report_mail("error: %s",detail)
+ return false, detail
else
report_mail("message sent")
+ return true
end
end
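
With these changes mail.send reports success or failure back to the caller instead of only
logging. A hedged usage sketch; the server and addresses are placeholders and the utilities.mail
name is an assumption about where this module is hooked in:

    local ok, detail = utilities.mail.send {
        server = "smtp.example.com",
        port   = "25",
        from   = "sender@example.com",
        to     = "recipient@example.com",
        body   = "test message",
    }
    if not ok then
        print("mail not sent: " .. tostring(detail)) -- "invalid sender", smtp error, ...
    end
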
diff --git a/Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua b/Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua
index 758c817469d..af7012392b6 100644
--- a/Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua
+++ b/Master/texmf-dist/tex/context/base/util-sql-imp-swiglib.lua
@@ -12,7 +12,7 @@ if not modules then modules = { } end modules ['util-sql-swiglib'] = {
-- closer to the original library it's also less dependent.
local concat = table.concat
-local format = string.format
+local format, byte = string.format, string.byte
local lpegmatch = lpeg.match
local setmetatable, type = setmetatable, type
local sleep = os.sleep
@@ -21,9 +21,15 @@ local trace_sql = false trackers.register("sql.trace", function(v
local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
local report_state = logs.reporter("sql","swiglib")
+local helpers = require("swiglib.helpers.core")
local sql = utilities.sql
-local mysql = require("swiglib.mysql.core") -- "5.6"
------ mysql = swiglib("mysql.core") -- "5.6"
+local mysql = require("swiglib.mysql.core") -- "5.6.14"
+----- mysql = swiglib("mysql.core") -- "5.6.14"
+
+local new_u_char_array = helpers.new_u_char_array or helpers.new_ucharArray
+local ucharArray_setitem = helpers.u_char_array_setitem or helpers.ucharArray_setitem
+local int_p_assign = helpers.int_p_assign
+local ulongArray_getitem = helpers.u_long_array_getitem or helpers.ulongArray_getitem
-- inspect(table.sortedkeys(mysql))
@@ -57,13 +63,15 @@ local mysql_free_result = mysql.mysql_free_result
local mysql_use_result = mysql.mysql_use_result
local mysql_error_message = mysql.mysql_error
-local mysql_options_argument = mysql.mysql_options_argument
+----- mysql_options_argument = mysql.mysql_options_argument
local instance = mysql.MYSQL()
local mysql_constant_false = false
local mysql_constant_true = true
+----- util_getbytearray = mysql.util_getbytearray
+
-- if mysql_options_argument then
--
-- mysql_constant_false = mysql_options_argument(false) -- 0 "\0"
@@ -82,6 +90,48 @@ local mysql_constant_true = true
--
-- end
+-- some helpers:
+
+function mysql.options_argument(arg)
+ local targ = type(arg)
+ if targ == "boolean" then
+ local o = new_u_char_array(1)
+ ucharArray_setitem(o,0,arg == true and 64 or 0)
+ return o
+ elseif targ == "string" then
+ local o = new_u_char_array(#arg)
+ ucharArray_setitem(o,0,0)
+ for i=1,#arg do
+ ucharArray_setitem(o,i-1,byte(arg,i))
+ end
+ return o
+ elseif targ == "number" then
+ local o = core.new_int_p()
+ int_p_assign(o, arg)
+ return o
+ else
+ return nil
+ end
+end
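+
+-- a usage sketch of the helper above; it assumes the swig wrapper also exposes
+-- mysql_options and the MYSQL_OPT_RECONNECT constant (not checked here):
+--
+-- local reconnect = mysql.options_argument(true)
+-- if reconnect then
+--     mysql.mysql_options(instance,mysql.MYSQL_OPT_RECONNECT,reconnect)
+-- end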
+
+-- function mysql.util_unpackbytearray(row,noffields,len)
+-- if row == nil then
+-- return { }
+-- elseif noffields < 1 then
+-- return { }
+-- else
+-- local t = { }
+-- for i=0,noffields-1 do
+-- local l = ulongArray_getitem(len,i) -- zero based ... element from len array
+-- local r = util_getbytearray(row,i,l) -- zero based ... element from len array
+-- t[#t+1]= r
+-- end
+-- return t
+-- end
+-- end
+
+--
+
local typemap = mysql.MYSQL_TYPE_VAR_STRING and {
[mysql.MYSQL_TYPE_VAR_STRING ] = "string",
[mysql.MYSQL_TYPE_STRING ] = "string",
@@ -145,10 +195,10 @@ local function numrows(t)
return t.nofrows
end
--- swig_type
+local fetch_fields_from_current_row = mysql.util_mysql_fetch_fields_from_current_row
+local fetch_all_rows = mysql.util_mysql_fetch_all_rows
--- local ulongArray_getitem = mysql.ulongArray_getitem
--- local util_getbytearray = mysql.util_getbytearray
+-- swig_type
-- local function list(t)
-- local result = t._result_
@@ -163,7 +213,7 @@ end
-- end
-- local function hash(t)
--- local list = util_mysql_fetch_fields_from_current_row(t._result_)
+-- local list = fetch_fields_from_current_row(t._result_)
-- local result = t._result_
-- local fields = t.names
-- local row = mysql_fetch_row(result)
@@ -176,15 +226,12 @@ end
-- return result
-- end
-local util_mysql_fetch_fields_from_current_row = mysql.util_mysql_fetch_fields_from_current_row
-local util_mysql_fetch_all_rows = mysql.util_mysql_fetch_all_rows
-
local function list(t)
- return util_mysql_fetch_fields_from_current_row(t._result_)
+ return fetch_fields_from_current_row(t._result_)
end
local function hash(t)
- local list = util_mysql_fetch_fields_from_current_row(t._result_)
+ local list = fetch_fields_from_current_row(t._result_)
local fields = t.names
local data = { }
for i=1,t.noffields do
@@ -194,7 +241,7 @@ local function hash(t)
end
local function wholelist(t)
- return util_mysql_fetch_all_rows(t._result_)
+ return fetch_all_rows(t._result_)
end
local mt = { __index = {
@@ -453,20 +500,16 @@ local function execute(specification)
end
local wraptemplate = [[
-local mysql = require("swigluamysql") -- will be stored in method
+local mysql = require("swiglib.mysql.core") -- will be stored in method
------ mysql_fetch_row = mysql.mysql_fetch_row
------ mysql_fetch_lengths = mysql.mysql_fetch_lengths
------ util_unpackbytearray = mysql.util_unpackbytearray
-local util_mysql_fetch_fields_from_current_row
- = mysql.util_mysql_fetch_fields_from_current_row
+local fetch_fields = mysql.util_mysql_fetch_fields_from_current_row
-local converters = utilities.sql.converters
-local deserialize = utilities.sql.deserialize
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
-local tostring = tostring
-local tonumber = tonumber
-local booleanstring = string.booleanstring
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
%s
@@ -482,10 +525,7 @@ return function(result)
local target = { } -- no %s needed here
result = result._result_
for i=1,nofrows do
- -- local row = mysql_fetch_row(result)
- -- local len = mysql_fetch_lengths(result)
- -- local cells = util_unpackbytearray(row,noffields,len)
- local cells = util_mysql_fetch_fields_from_current_row(result)
+ local cells = fetch_fields(result)
target[%s] = {
%s
}
diff --git a/Master/texmf-dist/tex/context/base/util-sql-users.lua b/Master/texmf-dist/tex/context/base/util-sql-users.lua
index ea8fb4e07f5..a1f4339461c 100644
--- a/Master/texmf-dist/tex/context/base/util-sql-users.lua
+++ b/Master/texmf-dist/tex/context/base/util-sql-users.lua
@@ -17,7 +17,7 @@ local sql = utilities.sql
local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern
local sumhexa = md5.sumhexa
-local booleanstring = string.booleanstring
+local toboolean = string.toboolean
local sql = utilities.sql
local users = { }
@@ -244,7 +244,7 @@ function users.add(db,specification)
fullname = name or fullname,
password = encryptpassword(specification.password or ""),
group = groupnumbers[specification.group] or groupnumbers.guest,
- enabled = booleanstring(specification.enabled) and "1" or "0",
+ enabled = toboolean(specification.enabled) and "1" or "0",
email = specification.email,
address = specification.address,
theme = specification.theme,
@@ -354,7 +354,7 @@ function users.save(db,id,specification)
fullname = fullname,
password = encryptpassword(password),
group = groupnumbers[group],
- enabled = booleanstring(enabled) and "1" or "0",
+ enabled = toboolean(enabled) and "1" or "0",
email = email,
address = address,
theme = theme,
diff --git a/Master/texmf-dist/tex/context/base/util-sto.lua b/Master/texmf-dist/tex/context/base/util-sto.lua
index 191d6cd73b2..8aafca425b8 100644
--- a/Master/texmf-dist/tex/context/base/util-sto.lua
+++ b/Master/texmf-dist/tex/context/base/util-sto.lua
@@ -103,12 +103,22 @@ end
local function f_empty () return "" end -- t,k
local function f_self (t,k) t[k] = k return k end
local function f_table (t,k) local v = { } t[k] = v return v end
+local function f_number(t,k) t[k] = 0 return 0 end -- t,k,v
local function f_ignore() end -- t,k,v
-local t_empty = { __index = f_empty }
-local t_self = { __index = f_self }
-local t_table = { __index = f_table }
-local t_ignore = { __newindex = f_ignore }
+local f_index = {
+ ["empty"] = f_empty,
+ ["self"] = f_self,
+ ["table"] = f_table,
+ ["number"] = f_number,
+}
+
+local t_index = {
+ ["empty"] = { __index = f_empty },
+ ["self"] = { __index = f_self },
+ ["table"] = { __index = f_table },
+ ["number"] = { __index = f_number },
+}
function table.setmetatableindex(t,f)
if type(t) ~= "table" then
@@ -116,46 +126,30 @@ function table.setmetatableindex(t,f)
end
local m = getmetatable(t)
if m then
- if f == "empty" then
- m.__index = f_empty
- elseif f == "key" then
- m.__index = f_self
- elseif f == "table" then
- m.__index = f_table
- else
- m.__index = f
- end
+ m.__index = f_index[f] or f
else
- if f == "empty" then
- setmetatable(t, t_empty)
- elseif f == "key" then
- setmetatable(t, t_self)
- elseif f == "table" then
- setmetatable(t, t_table)
- else
- setmetatable(t,{ __index = f })
- end
+ setmetatable(t,t_index[f] or { __index = f })
end
return t
end
+local f_index = {
+ ["ignore"] = f_ignore,
+}
+
+local t_index = {
+ ["ignore"] = { __newindex = f_ignore },
+}
+
function table.setmetatablenewindex(t,f)
if type(t) ~= "table" then
f, t = t, { }
end
local m = getmetatable(t)
if m then
- if f == "ignore" then
- m.__newindex = f_ignore
- else
- m.__newindex = f
- end
+ m.__newindex = f_index[f] or f
else
- if f == "ignore" then
- setmetatable(t, t_ignore)
- else
- setmetatable(t,{ __newindex = f })
- end
+ setmetatable(t,t_index[f] or { __newindex = f })
end
return t
end
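
-- usage sketch: a string selects one of the predefined index functions, anything
-- else is taken as the __index (or __newindex) handler itself
--
-- local cache = table.setmetatableindex({ },"table")  -- cache[k] becomes { } on first access
-- local names = table.setmetatableindex({ },function(t,k)
--     local v = "U+" .. string.format("%05X",k)
--     t[k] = v
--     return v
-- end)
-- local frozen = table.setmetatablenewindex({ },"ignore") -- writes to new keys are dropped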
diff --git a/Master/texmf-dist/tex/context/base/util-str.lua b/Master/texmf-dist/tex/context/base/util-str.lua
index 4890a11d606..52c48badda8 100644
--- a/Master/texmf-dist/tex/context/base/util-str.lua
+++ b/Master/texmf-dist/tex/context/base/util-str.lua
@@ -20,8 +20,24 @@ local utfchar, utfbyte = utf.char, utf.byte
----- loadstripped = utilities.lua.loadstripped
----- setmetatableindex = table.setmetatableindex
-local loadstripped = _LUAVERSION < 5.2 and load or function(str)
- return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stipped load
+local loadstripped = nil
+
+if _LUAVERSION < 5.2 then
+
+ loadstripped = function(str,shortcuts)
+ return load(str)
+ end
+
+else
+
+ loadstripped = function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
+
end
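
-- a quick sketch of what the extra argument buys in lua 5.2 and later: the
-- compiled chunk only sees the given environment instead of the globals
--
-- local f = loadstripped("return format('%s!',tostring(123))",{ format = string.format, tostring = tostring })
-- print(f and f()) -- 123!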
-- todo: make a special namespace for the formatter
@@ -31,10 +47,12 @@ if not number then number = { } end -- temp hack for luatex-fonts
local stripper = patterns.stripzeros
local function points(n)
+ n = tonumber(n)
return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n = tonumber(n)
return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536))
end
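
-- quick check of the tonumber guard added above (expected results):
--
-- print(points())          -- 0pt
-- print(points(65536))     -- 1pt
-- print(points("65536"))   -- 1pt (strings are now accepted too)
-- print(basepoints(65536)) -- roughly 0.99626bp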
@@ -136,17 +154,105 @@ end
-- print(strings.tabtospace(t[k]))
-- end
-function strings.striplong(str) -- strips all leading spaces
- str = gsub(str,"^%s*","")
- str = gsub(str,"[\n\r]+ *","\n")
- return str
+-- todo: lpeg
+
+-- function strings.striplong(str) -- strips all leading spaces
+-- str = gsub(str,"^%s*","")
+-- str = gsub(str,"[\n\r]+ *","\n")
+-- return str
+-- end
+
+local newline = patterns.newline
+local endofstring = patterns.endofstring
+local whitespace = patterns.whitespace
+local spacer = patterns.spacer
+
+local space = spacer^0
+local nospace = space/""
+local endofline = nospace * newline
+
+local stripend = (whitespace^1 * endofstring)/""
+
+local normalline = (nospace * ((1-space*(newline+endofstring))^1) * nospace)
+
+local stripempty = endofline^1/""
+local normalempty = endofline^1
+local singleempty = endofline * (endofline^0/"")
+local doubleempty = endofline * endofline^-1 * (endofline^0/"")
+
+local stripstart = stripempty^0
+
+local p_prune_normal = Cs ( stripstart * ( stripend + normalline + normalempty )^0 )
+local p_prune_collapse = Cs ( stripstart * ( stripend + normalline + doubleempty )^0 )
+local p_prune_noempty = Cs ( stripstart * ( stripend + normalline + singleempty )^0 )
+local p_retain_normal = Cs ( ( normalline + normalempty )^0 )
+local p_retain_collapse = Cs ( ( normalline + doubleempty )^0 )
+local p_retain_noempty = Cs ( ( normalline + singleempty )^0 )
+
+-- function striplines(str,prune,collapse,noempty)
+-- if prune then
+-- if noempty then
+-- return lpegmatch(p_prune_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_prune_collapse,str) or str
+-- else
+-- return lpegmatch(p_prune_normal,str) or str
+-- end
+-- else
+-- if noempty then
+-- return lpegmatch(p_retain_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_retain_collapse,str) or str
+-- else
+-- return lpegmatch(p_retain_normal,str) or str
+-- end
+-- end
+-- end
+
+local striplinepatterns = {
+ ["prune"] = p_prune_normal,
+ ["prune and collapse"] = p_prune_collapse, -- default
+ ["prune and no empty"] = p_prune_noempty,
+ ["retain"] = p_retain_normal,
+ ["retain and collapse"] = p_retain_collapse,
+ ["retain and no empty"] = p_retain_noempty,
+}
+
+strings.striplinepatterns = striplinepatterns
+
+function strings.striplines(str,how)
+ return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str
end
--- local template = string.striplong([[
+strings.striplong = strings.striplines -- for old times' sake
+
+-- local str = table.concat( {
+-- " ",
+-- " aap",
+-- " noot mies",
+-- " ",
+-- " ",
+-- " zus wim jet",
+-- "zus wim jet",
+-- " zus wim jet",
+-- " ",
+-- }, "\n")
+
+-- local str = table.concat( {
+-- " aaaa",
+-- " bb",
+-- " cccccc",
+-- }, "\n")
+
+-- for k, v in table.sortedhash(utilities.strings.striplinepatterns) do
+-- logs.report("stripper","method: %s, result: [[%s]]",k,utilities.strings.striplines(str,k))
+-- end
+
+-- inspect(strings.striplong([[
-- aaaa
-- bb
-- cccccc
--- ]])
+-- ]]))
function strings.nice(str)
str = gsub(str,"[:%-+_]+"," ") -- maybe more
@@ -195,12 +301,14 @@ end
-- points %p number (scaled points)
-- basepoints %b number (scaled points)
-- table concat %...t table
+-- table concat %{.}t table
-- serialize %...T sequenced (no nested tables)
+-- serialize %{.}T sequenced (no nested tables)
-- boolean (logic) %l boolean
-- BOOLEAN %L boolean
-- whitespace %...w
-- automatic %...a 'whatever' (string, table, ...)
--- automatic %...a "whatever" (string, table, ...)
+-- automatic %...A "whatever" (string, table, ...)
local n = 0
@@ -262,24 +370,32 @@ function number.signed(i)
end
end
-local preamble = [[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-]]
+local zero = P("0")^1 / ""
+local plus = P("+") / ""
+local minus = P("-")
+local separator = S(".")
+local digit = R("09")
+local trailing = zero^1 * #S("eE")
+local exponent = (S("eE") * (plus + Cs((minus * zero^0 * P(-1))/"") + minus) * zero^0 * (P(-1) * Cc("0") + P(1)^1))
+local pattern_a = Cs(minus^0 * digit^1 * (separator/"" * trailing + separator * (trailing + digit)^0) * exponent)
+local pattern_b = Cs((exponent + P(1))^0)
+
+function number.sparseexponent(f,n)
+ if not n then
+ n = f
+ f = "%e"
+ end
+ local tn = type(n)
+ if tn == "string" then -- cast to number
+ local m = tonumber(n)
+ if m then
+ return lpegmatch((f == "%e" or f == "%E") and pattern_a or pattern_b,format(f,m))
+ end
+ elseif tn == "number" then
+ return lpegmatch((f == "%e" or f == "%E") and pattern_a or pattern_b,format(f,n))
+ end
+ return tostring(n)
+end
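+
+-- a few checks of the stripped exponent rendering (results as expected, not verified here):
+--
+-- print(number.sparseexponent("%.3e",1234))  -- 1.234e3
+-- print(number.sparseexponent("%.3e",0.001)) -- 1e-3 (trailing zeros are stripped too)
+-- print(number.sparseexponent(1234))         -- defaults to %e: 1.234e3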
local template = [[
%s
@@ -287,6 +403,61 @@ local template = [[
return function(%s) return %s end
]]
+local preamble, environment = "", { }
+
+if _LUAVERSION < 5.2 then
+
+ preamble = [[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+
+else
+
+ environment = {
+ global = global or _G,
+ lpeg = lpeg,
+ type = type,
+ tostring = tostring,
+ tonumber = tonumber,
+ format = string.format,
+ concat = table.concat,
+ signed = number.signed,
+ points = number.points,
+ basepoints = number.basepoints,
+ utfchar = utf.char,
+ utfbyte = utf.byte,
+ lpegmatch = lpeg.match,
+ nspaces = string.nspaces,
+ tracedchar = string.tracedchar,
+ autosingle = string.autosingle,
+ autodouble = string.autodouble,
+ sequenced = table.sequenced,
+ formattednumber = number.formatted,
+ sparseexponent = number.sparseexponent,
+ }
+
+end
+
+-- -- --
+
local arguments = { "a1" } -- faster than previously used (select(n,...))
setmetatable(arguments, { __index =
@@ -298,7 +469,7 @@ setmetatable(arguments, { __index =
})
local prefix_any = C((S("+- .") + R("09"))^0)
-local prefix_tab = C((1-R("az","AZ","09","%%"))^0)
+local prefix_tab = P("{") * C((1-P("}"))^0) * P("}") + C((1-R("az","AZ","09","%%"))^0)
-- we've split all cases as then we can optimize them (let's omit the fuzzy u)
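
-- the new braced prefix makes the separator for the table directives explicit,
-- e.g. (assuming the captured text is passed on to concat / sequenced as-is):
--
-- local f = string.formatters["%{, }t"]
-- print(f({ "one", "two", "three" })) -- one, two, three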
@@ -337,7 +508,7 @@ local format_i = function(f)
if f and f ~= "" then
return format("format('%%%si',a%s)",f,n)
else
- return format("a%s",n)
+ return format("format('%%i',a%s)",n) -- why not just tostring()
end
end
@@ -353,6 +524,11 @@ local format_f = function(f)
return format("format('%%%sf',a%s)",f,n)
end
+local format_F = function(f)
+ n = n + 1
+ return format("((a%s == 0 and '0') or (a%s == 1 and '1') or format('%%%sf',a%s))",n,n,f,n)
+end
+
local format_g = function(f)
n = n + 1
return format("format('%%%sg',a%s)",f,n)
@@ -373,6 +549,16 @@ local format_E = function(f)
return format("format('%%%sE',a%s)",f,n)
end
+local format_j = function(f)
+ n = n + 1
+ return format("sparseexponent('%%%se',a%s)",f,n)
+end
+
+local format_J = function(f)
+ n = n + 1
+ return format("sparseexponent('%%%sE',a%s)",f,n)
+end
+
local format_x = function(f)
n = n + 1
return format("format('%%%sx',a%s)",f,n)
@@ -518,6 +704,68 @@ local format_W = function(f) -- handy when doing depth related indent
return format("nspaces[%s]",tonumber(f) or 0)
end
+-- maybe to util-num
+
+local digit = patterns.digit
+local period = patterns.period
+local three = digit * digit * digit
+
+local splitter = Cs (
+ (((1 - (three^1 * period))^1 + C(three)) * (Carg(1) * three)^1 + C((1-period)^1))
+ * (P(1)/"" * Carg(2)) * C(2)
+)
+
+patterns.formattednumber = splitter
+
+function number.formatted(n,sep1,sep2)
+ local s = type(n) == "string" and n or format("%0.2f",n)
+ if sep1 == true then
+ return lpegmatch(splitter,s,1,".",",")
+ elseif sep1 == "." then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ",")
+ elseif sep1 == "," then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ".")
+ else
+ return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
+ end
+end
+
+-- print(number.formatted(1))
+-- print(number.formatted(12))
+-- print(number.formatted(123))
+-- print(number.formatted(1234))
+-- print(number.formatted(12345))
+-- print(number.formatted(123456))
+-- print(number.formatted(1234567))
+-- print(number.formatted(12345678))
+-- print(number.formatted(12345678,true))
+-- print(number.formatted(1234.56,"!","?"))
+
+local format_m = function(f)
+ n = n + 1
+ if not f or f == "" then
+ f = ","
+ end
+ return format([[formattednumber(a%s,%q,".")]],n,f)
+end
+
+local format_M = function(f)
+ n = n + 1
+ if not f or f == "" then
+ f = "."
+ end
+ return format([[formattednumber(a%s,%q,",")]],n,f)
+end
+
+--
+
+local format_z = function(f)
+ n = n + (tonumber(f) or 1)
+ return "''" -- okay, not that efficient to append '' but a special case anyway
+end
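+
+-- sketch: %z eats its argument(s) without producing output, handy when a shared
+-- template gets more arguments than a particular instance needs
+--
+-- print(string.formatters["%s%z%s"]("a","ignored","b")) -- ab
+-- print(string.formatters["%s %2z%s"]("a","x","y","b")) -- a b (two arguments skipped)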
+
+--
+
local format_rest = function(s)
return format("%q",s) -- catches " and \n and such
end
@@ -544,6 +792,8 @@ local format_extension = function(extensions,f,name)
end
end
+-- aA b cC d eE f gG hH iI jJ lL mM N o p qQ r sS tT uU wW xX z
+
local builder = Cs { "start",
start = (
(
@@ -552,7 +802,7 @@ local builder = Cs { "start",
V("!") -- new
+ V("s") + V("q")
+ V("i") + V("d")
- + V("f") + V("g") + V("G") + V("e") + V("E")
+ + V("f") + V("F") + V("g") + V("G") + V("e") + V("E")
+ V("x") + V("X") + V("o")
--
+ V("c")
@@ -567,11 +817,13 @@ local builder = Cs { "start",
+ V("t") + V("T")
+ V("l") + V("L")
+ V("I")
- + V("h") -- new
+ V("w") -- new
+ V("W") -- new
+ V("a") -- new
+ V("A") -- new
+ + V("j") + V("J") -- stripped e E
+ + V("m") + V("M") -- new
+ + V("z") -- new
--
+ V("*") -- ignores probably messed up %
)
@@ -585,6 +837,7 @@ local builder = Cs { "start",
["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer)
["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer)
["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float)
+ ["F"] = (prefix_any * P("F")) / format_F, -- %F => regular %f (float) but 0/1 check
["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float)
["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float)
["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float)
@@ -608,45 +861,63 @@ local builder = Cs { "start",
["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units)
["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat
["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced
- ["l"] = (prefix_tab * P("l")) / format_l, -- %l => boolean
- ["L"] = (prefix_tab * P("L")) / format_L, -- %L => BOOLEAN
+ ["l"] = (prefix_any * P("l")) / format_l, -- %l => boolean
+ ["L"] = (prefix_any * P("L")) / format_L, -- %L => BOOLEAN
["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer
--
["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added)
["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier
--
+ ["j"] = (prefix_any * P("j")) / format_j, -- %j => %e (float) stripped exponent (irrational)
+ ["J"] = (prefix_any * P("J")) / format_J, -- %J => %E (float) stripped exponent (irrational)
+ --
+ ["m"] = (prefix_tab * P("m")) / format_m, -- %m => xxx.xxx.xxx,xx (optional prefix instead of .)
+ ["M"] = (prefix_tab * P("M")) / format_M, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,)
+ --
+ ["z"] = (prefix_any * P("z")) / format_z, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,)
+ --
["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring)
["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
--
- ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%%%")^1) / format_rest, -- rest (including %%)
+ ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%")^1) / format_rest, -- rest (including %%)
--
["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension,
}
-- we can be clever and only alias what is needed
+-- local direct = Cs (
+-- P("%")/""
+-- * Cc([[local format = string.format return function(str) return format("%]])
+-- * (S("+- .") + R("09"))^0
+-- * S("sqidfgGeExXo")
+-- * Cc([[",str) end]])
+-- * P(-1)
+-- )
+
local direct = Cs (
- P("%")/""
- * Cc([[local format = string.format return function(str) return format("%]])
- * (S("+- .") + R("09"))^0
- * S("sqidfgGeExXo")
- * Cc([[",str) end]])
- * P(-1)
- )
+ P("%")
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * P(-1) / [[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
local p = lpegmatch(direct,str)
if p then
+ -- f = loadstripped(p)()
+ -- print("builder 1 >",p)
f = loadstripped(p)()
else
n = 0
- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ -- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ p = lpegmatch(builder,str,1,t._connector_,t._extensions_) -- after this we know n
if n > 0 then
p = format(template,preamble,t._preamble_,arguments[n],p)
--- print("builder>",p)
- f = loadstripped(p)()
+ -- print("builder 2 >",p)
+ f = loadstripped(p,t._environment_)() -- t._environment is not populated (was experiment)
else
f = function() return str end
end
@@ -701,10 +972,28 @@ strings.formatters = { }
-- table (metatable) in which case we could better keep a count and
-- clear that table when a threshold is reached
-function strings.formatters.new()
- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
+-- _connector_ is an experiment
+
+if _LUAVERSION < 5.2 then
+
+ function strings.formatters.new(noconcat)
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = preamble, _environment_ = { } }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
+else
+
+ function strings.formatters.new(noconcat)
+ local e = { } -- better make a copy as we can overload
+ for k, v in next, environment do
+ e[k] = v
+ end
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = "", _environment_ = e }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
end
-- function strings.formatters.new()
@@ -723,8 +1012,12 @@ string.formatter = function(str,...) return formatters[str](...) end -- someti
local function add(t,name,template,preamble)
if type(t) == "table" and t._type_ == "formatter" then
t._extensions_[name] = template or "%s"
- if preamble then
+ if type(preamble) == "string" then
t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
+ elseif type(preamble) == "table" then
+ for k, v in next, preamble do
+ t._environment_[k] = v
+ end
end
end
end
@@ -733,11 +1026,31 @@ strings.formatters.add = add
-- registered in the default instance (should we fall back on this one?)
-lpeg.patterns.xmlescape = Cs((P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;" + P('"')/"&quot;" + P(1))^0)
-lpeg.patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0)
+patterns.xmlescape = Cs((P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;" + P('"')/"&quot;" + P(1))^0)
+patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0)
+patterns.luaescape = Cs(((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0) -- maybe also \0
+patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0 * Cc('"'))
+
+-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but
+-- faster again when other q-escapables are found (the ones we don't need to escape)
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+-- add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+-- add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+-- add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+
+if _LUAVERSION < 5.2 then
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+
+else
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape = lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape = lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape = lpeg.patterns.luaescape })
+
+end
-- -- yes or no:
--
diff --git a/Master/texmf-dist/tex/context/base/util-tab.lua b/Master/texmf-dist/tex/context/base/util-tab.lua
index a47c0cb77fd..f9e9b318df3 100644
--- a/Master/texmf-dist/tex/context/base/util-tab.lua
+++ b/Master/texmf-dist/tex/context/base/util-tab.lua
@@ -10,37 +10,40 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch, gsub = string.format, string.gmatch, string.gsub
+local format, gmatch, gsub, sub = string.format, string.gmatch, string.gsub, string.sub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select
local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc
-local serialize, sortedkeys, sortedpairs = table.serialize, table.sortedkeys, table.sortedpairs
+local sortedkeys, sortedpairs = table.sortedkeys, table.sortedpairs
local formatters = string.formatters
+local utftoeight = utf.toeight
local splitter = lpeg.tsplitat(".")
-function tables.definetable(target,nofirst,nolast) -- defines undefined tables
- local composed, shortcut, t = nil, nil, { }
+function utilities.tables.definetable(target,nofirst,nolast) -- defines undefined tables
+ local composed, t = nil, { }
local snippets = lpegmatch(splitter,target)
for i=1,#snippets - (nolast and 1 or 0) do
local name = snippets[i]
if composed then
- composed = shortcut .. "." .. name
- shortcut = shortcut .. "_" .. name
- t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ composed = composed .. "." .. name
+ t[#t+1] = formatters["if not %s then %s = { } end"](composed,composed)
else
composed = name
- shortcut = name
if not nofirst then
t[#t+1] = formatters["%s = %s or { }"](composed,composed)
end
end
end
- if nolast then
- composed = shortcut .. "." .. snippets[#snippets]
+ if composed then
+ if nolast then
+ composed = composed .. "." .. snippets[#snippets]
+ end
+ return concat(t,"\n"), composed -- could be shortcut
+ else
+ return "", target
end
- return concat(t,"\n"), composed
end
-- local t = tables.definedtable("a","b","c","d")
@@ -72,7 +75,7 @@ end
function tables.migratetable(target,v,root)
local t = root or _G
- local names = string.split(target,".")
+ local names = lpegmatch(splitter,target)
for i=1,#names-1 do
local name = names[i]
t[name] = t[name] or { }
@@ -293,100 +296,89 @@ function tables.encapsulate(core,capsule,protect)
end
end
-local function fastserialize(t,r,outer) -- no mixes
- r[#r+1] = "{"
- local n = #t
- if n > 0 then
- for i=1,n do
- local v = t[i]
- local tv = type(v)
- if tv == "string" then
- r[#r+1] = formatters["%q,"](v)
- elseif tv == "number" then
- r[#r+1] = formatters["%s,"](v)
- elseif tv == "table" then
- fastserialize(v,r)
- elseif tv == "boolean" then
- r[#r+1] = formatters["%S,"](v)
+-- best keep [%q] keys (we have some in older applications, e.g. saved user data)
+
+local f_hashed_string = formatters["[%q]=%q,"]
+local f_hashed_number = formatters["[%q]=%s,"]
+local f_hashed_boolean = formatters["[%q]=%l,"]
+local f_hashed_table = formatters["[%q]="]
+
+local f_indexed_string = formatters["[%s]=%q,"]
+local f_indexed_number = formatters["[%s]=%s,"]
+local f_indexed_boolean = formatters["[%s]=%l,"]
+local f_indexed_table = formatters["[%s]="]
+
+local f_ordered_string = formatters["%q,"]
+local f_ordered_number = formatters["%s,"]
+local f_ordered_boolean = formatters["%l,"]
+
+function table.fastserialize(t,prefix)
+
+ -- prefix should contain the =
+ -- not sorted
+ -- only number and string indices (currently)
+
+ local r = { type(prefix) == "string" and prefix or "return" }
+ local m = 1
+
+ local function fastserialize(t,outer) -- no mixes
+ local n = #t
+ m = m + 1
+ r[m] = "{"
+ if n > 0 then
+ for i=0,n do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "string" then
+ m = m + 1 r[m] = f_ordered_string(v)
+ elseif tv == "number" then
+ m = m + 1 r[m] = f_ordered_number(v)
+ elseif tv == "table" then
+ fastserialize(v)
+ elseif tv == "boolean" then
+ m = m + 1 r[m] = f_ordered_boolean(v)
+ end
end
end
- else
for k, v in next, t do
- local tv = type(v)
- if tv == "string" then
- r[#r+1] = formatters["[%q]=%q,"](k,v)
- elseif tv == "number" then
- r[#r+1] = formatters["[%q]=%s,"](k,v)
- elseif tv == "table" then
- r[#r+1] = formatters["[%q]="](k)
- fastserialize(v,r)
- elseif tv == "boolean" then
- r[#r+1] = formatters["[%q]=%S,"](k,v)
+ local tk = type(k)
+ if tk == "number" then
+ if k > n or k < 0 then
+ local tv = type(v)
+ if tv == "string" then
+ m = m + 1 r[m] = f_indexed_string(k,v)
+ elseif tv == "number" then
+ m = m + 1 r[m] = f_indexed_number(k,v)
+ elseif tv == "table" then
+ m = m + 1 r[m] = f_indexed_table(k)
+ fastserialize(v)
+ elseif tv == "boolean" then
+ m = m + 1 r[m] = f_indexed_boolean(k,v)
+ end
+ end
+ else
+ local tv = type(v)
+ if tv == "string" then
+ m = m + 1 r[m] = f_hashed_string(k,v)
+ elseif tv == "number" then
+ m = m + 1 r[m] = f_hashed_number(k,v)
+ elseif tv == "table" then
+ m = m + 1 r[m] = f_hashed_table(k)
+ fastserialize(v)
+ elseif tv == "boolean" then
+ m = m + 1 r[m] = f_hashed_boolean(k,v)
+ end
end
end
+ m = m + 1
+ if outer then
+ r[m] = "}"
+ else
+ r[m] = "},"
+ end
+ return r
end
- if outer then
- r[#r+1] = "}"
- else
- r[#r+1] = "},"
- end
- return r
-end
-
--- local f_hashed_string = formatters["[%q]=%q,"]
--- local f_hashed_number = formatters["[%q]=%s,"]
--- local f_hashed_table = formatters["[%q]="]
--- local f_hashed_true = formatters["[%q]=true,"]
--- local f_hashed_false = formatters["[%q]=false,"]
---
--- local f_indexed_string = formatters["%q,"]
--- local f_indexed_number = formatters["%s,"]
--- ----- f_indexed_true = formatters["true,"]
--- ----- f_indexed_false = formatters["false,"]
---
--- local function fastserialize(t,r,outer) -- no mixes
--- r[#r+1] = "{"
--- local n = #t
--- if n > 0 then
--- for i=1,n do
--- local v = t[i]
--- local tv = type(v)
--- if tv == "string" then
--- r[#r+1] = f_indexed_string(v)
--- elseif tv == "number" then
--- r[#r+1] = f_indexed_number(v)
--- elseif tv == "table" then
--- fastserialize(v,r)
--- elseif tv == "boolean" then
--- -- r[#r+1] = v and f_indexed_true(k) or f_indexed_false(k)
--- r[#r+1] = v and "true," or "false,"
--- end
--- end
--- else
--- for k, v in next, t do
--- local tv = type(v)
--- if tv == "string" then
--- r[#r+1] = f_hashed_string(k,v)
--- elseif tv == "number" then
--- r[#r+1] = f_hashed_number(k,v)
--- elseif tv == "table" then
--- r[#r+1] = f_hashed_table(k)
--- fastserialize(v,r)
--- elseif tv == "boolean" then
--- r[#r+1] = v and f_hashed_true(k) or f_hashed_false(k)
--- end
--- end
--- end
--- if outer then
--- r[#r+1] = "}"
--- else
--- r[#r+1] = "},"
--- end
--- return r
--- end
-
-function table.fastserialize(t,prefix) -- so prefix should contain the =
- return concat(fastserialize(t,{ prefix or "return" },true))
+ return concat(fastserialize(t,true))
end
function table.deserialize(str)
@@ -410,6 +402,7 @@ function table.load(filename,loader)
if filename then
local t = (loader or io.loaddata)(filename)
if t and t ~= "" then
+ local t = utftoeight(t)
t = load(t)
if type(t) == "function" then
t = t()
@@ -422,10 +415,14 @@ function table.load(filename,loader)
end
function table.save(filename,t,n,...)
- io.savedata(filename,serialize(t,n == nil and true or n,...))
+ io.savedata(filename,table.serialize(t,n == nil and true or n,...)) -- no frozen table.serialize
end
-local function slowdrop(t)
+local f_key_value = formatters["%s=%q"]
+local f_add_table = formatters[" {%t},\n"]
+local f_return_table = formatters["return {\n%t}"]
+
+local function slowdrop(t) -- maybe less memory (intermediate concat)
local r = { }
local l = { }
for i=1,#t do
@@ -433,28 +430,30 @@ local function slowdrop(t)
local j = 0
for k, v in next, ti do
j = j + 1
- l[j] = formatters["%s=%q"](k,v)
+ l[j] = f_key_value(k,v)
end
- r[i] = formatters[" {%t},\n"](l)
+ r[i] = f_add_table(l)
end
- return formatters["return {\n%st}"](r)
+ return f_return_table(r)
end
local function fastdrop(t)
local r = { "return {\n" }
+ local m = 1
for i=1,#t do
local ti = t[i]
- r[#r+1] = " {"
+ m = m + 1 r[m] = " {"
for k, v in next, ti do
- r[#r+1] = formatters["%s=%q"](k,v)
+ m = m + 1 r[m] = f_key_value(k,v)
end
- r[#r+1] = "},\n"
+ m = m + 1 r[m] = "},\n"
end
- r[#r+1] = "}"
+ m = m + 1
+ r[m] = "}"
return concat(r)
end
-function table.drop(t,slow) -- only { { a=2 }, {a=3} }
+function table.drop(t,slow) -- only { { a=2 }, {a=3} } -- for special cases
if #t == 0 then
return "return { }"
elseif slow == true then
@@ -464,6 +463,9 @@ function table.drop(t,slow) -- only { { a=2 }, {a=3} }
end
end
+-- inspect(table.drop({ { a=2 }, {a=3} }))
+-- inspect(table.drop({ { a=2 }, {a=3} },true))
+
function table.autokey(t,k)
local v = { }
t[k] = v
@@ -491,3 +493,249 @@ function table.twowaymapper(t)
return t
end
+-- The next version is somewhat faster, although in practice one will seldom
+-- serialize a lot using this one. Often the above variants are more efficient.
+-- If we really needed this a lot, we could hash the quoted keys, or just not
+-- use indented code.
+
+-- char-def.lua : 0.53 -> 0.38
+-- husayni.tma : 0.28 -> 0.19
+
+local f_start_key_idx = formatters["%w{"]
+local f_start_key_num = formatters["%w[%s]={"]
+local f_start_key_str = formatters["%w[%q]={"]
+local f_start_key_boo = formatters["%w[%l]={"]
+local f_start_key_nop = formatters["%w{"]
+
+local f_stop = formatters["%w},"]
+
+local f_key_num_value_num = formatters["%w[%s]=%s,"]
+local f_key_str_value_num = formatters["%w[%q]=%s,"]
+local f_key_boo_value_num = formatters["%w[%l]=%s,"]
+
+local f_key_num_value_str = formatters["%w[%s]=%q,"]
+local f_key_str_value_str = formatters["%w[%q]=%q,"]
+local f_key_boo_value_str = formatters["%w[%l]=%q,"]
+
+local f_key_num_value_boo = formatters["%w[%s]=%l,"]
+local f_key_str_value_boo = formatters["%w[%q]=%l,"]
+local f_key_boo_value_boo = formatters["%w[%l]=%l,"]
+
+local f_key_num_value_not = formatters["%w[%s]={},"]
+local f_key_str_value_not = formatters["%w[%q]={},"]
+local f_key_boo_value_not = formatters["%w[%l]={},"]
+
+local f_key_num_value_seq = formatters["%w[%s]={ %, t },"]
+local f_key_str_value_seq = formatters["%w[%q]={ %, t },"]
+local f_key_boo_value_seq = formatters["%w[%l]={ %, t },"]
+
+local f_val_num = formatters["%w%s,"]
+local f_val_str = formatters["%w%q,"]
+local f_val_boo = formatters["%w%l,"]
+local f_val_not = formatters["%w{},"]
+local f_val_seq = formatters["%w{ %, t },"]
+
+local f_table_return = formatters["return {"]
+local f_table_name = formatters["%s={"]
+local f_table_direct = formatters["{"]
+local f_table_entry = formatters["[%q]={"]
+local f_table_finish = formatters["}"]
+
+----- f_string = formatters["%q"]
+
+local spaces = utilities.strings.newrepeater(" ")
+
+local serialize = table.serialize -- the extensive one, the one we started with
+
+-- There is still room for optimization (index run, key run), but I need to check
+-- with the latest Lua what #n gives for tables with holes. Anyway, for tracing
+-- purposes we want the indices / keys to be sorted, so it will never be really fast.
+
+function table.serialize(root,name,specification)
+
+ if type(specification) == "table" then
+ return serialize(root,name,specification) -- the original one
+ end
+
+ local t -- = { }
+ local n = 1
+
+ local function simple_table(t)
+ if #t > 0 then
+ local n = 0
+ for _, v in next, t do
+ n = n + 1
+ if type(v) == "table" then
+ return nil
+ end
+ end
+ if n == #t then
+ local tt = { }
+ local nt = 0
+ for i=1,#t do
+ local v = t[i]
+ local tv = type(v)
+ nt = nt + 1
+ if tv == "number" then
+ tt[nt] = v
+ elseif tv == "string" then
+ tt[nt] = format("%q",v) -- f_string(v)
+ elseif tv == "boolean" then
+ tt[nt] = v and "true" or "false"
+ else
+ return nil
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+ end
+
+ local function do_serialize(root,name,depth,level,indexed)
+ if level > 0 then
+ n = n + 1
+ if indexed then
+ t[n] = f_start_key_idx(depth)
+ else
+ local tn = type(name)
+ if tn == "number" then
+ t[n] = f_start_key_num(depth,name)
+ elseif tn == "string" then
+ t[n] = f_start_key_str(depth,name)
+ elseif tn == "boolean" then
+ t[n] = f_start_key_boo(depth,name)
+ else
+ t[n] = f_start_key_nop(depth)
+ end
+ end
+ depth = depth + 1
+ end
+ -- we could check for k (index) being number (cardinal)
+ if root and next(root) then
+ local first = nil
+ local last = 0
+ last = #root
+ for k=1,last do
+ if root[k] == nil then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
+ end
+ local sk = sortedkeys(root) -- inline fast version?
+ for i=1,#sk do
+ local k = sk[i]
+ local v = root[k]
+ local tv = type(v)
+ local tk = type(k)
+ if first and tk == "number" and k >= first and k <= last then
+ if tv == "number" then
+ n = n + 1 t[n] = f_val_num(depth,v)
+ elseif tv == "string" then
+ n = n + 1 t[n] = f_val_str(depth,v)
+ elseif tv == "table" then
+ if not next(v) then
+ n = n + 1 t[n] = f_val_not(depth)
+ else
+ local st = simple_table(v)
+ if st then
+ n = n + 1 t[n] = f_val_seq(depth,st)
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ end
+ elseif tv == "boolean" then
+ n = n + 1 t[n] = f_val_boo(depth,v)
+ end
+ elseif tv == "number" then
+ if tk == "number" then
+ n = n + 1 t[n] = f_key_num_value_num(depth,k,v)
+ elseif tk == "string" then
+ n = n + 1 t[n] = f_key_str_value_num(depth,k,v)
+ elseif tk == "boolean" then
+ n = n + 1 t[n] = f_key_boo_value_num(depth,k,v)
+ end
+ elseif tv == "string" then
+ if tk == "number" then
+ n = n + 1 t[n] = f_key_num_value_str(depth,k,v)
+ elseif tk == "string" then
+ n = n + 1 t[n] = f_key_str_value_str(depth,k,v)
+ elseif tk == "boolean" then
+ n = n + 1 t[n] = f_key_boo_value_str(depth,k,v)
+ end
+ elseif tv == "table" then
+ if not next(v) then
+ if tk == "number" then
+ n = n + 1 t[n] = f_key_num_value_not(depth,k,v)
+ elseif tk == "string" then
+ n = n + 1 t[n] = f_key_str_value_not(depth,k,v)
+ elseif tk == "boolean" then
+ n = n + 1 t[n] = f_key_boo_value_not(depth,k,v)
+ end
+ else
+ local st = simple_table(v)
+ if not st then
+ do_serialize(v,k,depth,level+1)
+ elseif tk == "number" then
+ n = n + 1 t[n] = f_key_num_value_seq(depth,k,st)
+ elseif tk == "string" then
+ n = n + 1 t[n] = f_key_str_value_seq(depth,k,st)
+ elseif tk == "boolean" then
+ n = n + 1 t[n] = f_key_boo_value_seq(depth,k,st)
+ end
+ end
+ elseif tv == "boolean" then
+ if tk == "number" then
+ n = n + 1 t[n] = f_key_num_value_boo(depth,k,v)
+ elseif tk == "string" then
+ n = n + 1 t[n] = f_key_str_value_boo(depth,k,v)
+ elseif tk == "boolean" then
+ n = n + 1 t[n] = f_key_boo_value_boo(depth,k,v)
+ end
+ end
+ end
+ end
+ if level > 0 then
+ n = n + 1 t[n] = f_stop(depth-1)
+ end
+ end
+
+ local tname = type(name)
+
+ if tname == "string" then
+ if name == "return" then
+ t = { f_table_return() }
+ else
+ t = { f_table_name(name) }
+ end
+ elseif tname == "number" then
+ t = { f_table_entry(name) }
+ elseif tname == "boolean" then
+ if name then
+ t = { f_table_return() }
+ else
+ t = { f_table_direct() }
+ end
+ else
+ t = { f_table_name("t") }
+ end
+
+ if root then
+ -- The dummy access will initialize a table that has a delayed initialization
+ -- using a metatable. (maybe explicitly test for metatable)
+ if getmetatable(root) then -- todo: make this an option, maybe even per subtable
+ local dummy = root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_ = nil
+ end
+ -- Let's forget about empty tables.
+ if next(root) then
+ do_serialize(root,name,1,0)
+ end
+ end
+ n = n + 1
+ t[n] = f_table_finish()
+ return concat(t,"\n")
+end
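+
+-- a small check (one space of indentation per level via %w, keys sorted):
+--
+-- print(table.serialize({ b = 2, a = 1, list = { 1, 2, 3 } },"test"))
+--
+-- test={
+--  ["a"]=1,
+--  ["b"]=2,
+--  ["list"]={ 1, 2, 3 },
+-- }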
diff --git a/Master/texmf-dist/tex/context/base/util-tpl.lua b/Master/texmf-dist/tex/context/base/util-tpl.lua
index 7a6abefd692..67d05822116 100644
--- a/Master/texmf-dist/tex/context/base/util-tpl.lua
+++ b/Master/texmf-dist/tex/context/base/util-tpl.lua
@@ -17,8 +17,8 @@ local trace_template = false trackers.register("templates.trace",function(v) t
local report_template = logs.reporter("template")
local tostring = tostring
-local format, sub = string.format, string.sub
-local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match
+local format, sub, byte = string.format, string.sub, string.byte
+local P, C, R, Cs, Cc, Carg, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.match, lpeg.patterns
-- todo: make installable template.new
@@ -52,7 +52,10 @@ local sqlescape = lpeg.replacer {
-- { "\t", "\\t" },
}
-local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
+local sqlquoted = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
+
+lpegpatterns.sqlescape = sqlescape
+lpegpatterns.sqlquoted = sqlquoted
-- escapeset : \0\1\2\3\4\5\6\7\8\9\10\11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31\"\\\127
-- test string: [[1\0\31test23"\\]] .. string.char(19) .. "23"
@@ -68,7 +71,7 @@ local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
-- }
--
-- slightly faster:
---
+
-- local luaescape = Cs ((
-- P('"' ) / [[\"]] +
-- P('\\') / [[\\]] +
@@ -78,9 +81,16 @@ local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
-- P(1)
-- )^0)
+----- xmlescape = lpegpatterns.xmlescape
+----- texescape = lpegpatterns.texescape
+local luaescape = lpegpatterns.luaescape
+----- sqlquoted = lpegpatterns.sqlquoted
+----- luaquoted = lpegpatterns.luaquoted
+
local escapers = {
lua = function(s)
- return sub(format("%q",s),2,-2)
+ -- return sub(format("%q",s),2,-2)
+ return lpegmatch(luaescape,s)
end,
sql = function(s)
return lpegmatch(sqlescape,s)
@@ -89,16 +99,14 @@ local escapers = {
local quotedescapers = {
lua = function(s)
+ -- return lpegmatch(luaquoted,s)
return format("%q",s)
end,
sql = function(s)
- return lpegmatch(sqlquotedescape,s)
+ return lpegmatch(sqlquoted,s)
end,
}
-lpeg.patterns.sqlescape = sqlescape
-lpeg.patterns.sqlescape = sqlquotedescape
-
local luaescaper = escapers.lua
local quotedluaescaper = quotedescapers.lua
@@ -151,6 +159,14 @@ end
templates.replace = replace
+function templates.replacer(str,how,recurse) -- reads nicer
+ return function(mapping)
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ end
+end
+
+-- local cmd = templates.replacer([[foo %bar%]]) print(cmd { bar = "foo" })
+
function templates.load(filename,mapping,how,recurse)
local data = io.loaddata(filename) or ""
if mapping and next(mapping) then
diff --git a/Master/texmf-dist/tex/context/base/util-you.lua b/Master/texmf-dist/tex/context/base/util-you.lua
new file mode 100644
index 00000000000..79a0e83e781
--- /dev/null
+++ b/Master/texmf-dist/tex/context/base/util-you.lua
@@ -0,0 +1,256 @@
+if not modules then modules = { } end modules ['util-you'] = {
+ version = 1.002,
+ comment = "library for fetching data from youless kwh meter polling device",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE",
+ license = "see context related readme files"
+}
+
+-- See mtx-youless.lua and s-youless.mkiv for examples of usage.
+--
+-- todo: already calculate min, max and average per hour and discard
+-- older data, or maybe a condense option
+--
+-- maybe just a special parser but who cares about speed here
+--
+-- curl -c pw.txt http://192.168.2.50/L?w=pwd
+-- curl -b pw.txt http://192.168.2.50/V?...
+--
+-- the socket library barks on an (indeed) invalid header ... unfortunately we cannot
+-- pass a password with each request ... although the youless is a rather nice gadget,
+-- the weak part is in the http polling
+
+require("util-jsn")
+
+-- the library variant:
+
+utilities = utilities or { }
+local youless = { }
+utilities.youless = youless
+
+local lpegmatch = lpeg.match
+local formatters = string.formatters
+
+-- dofile("http.lua")
+
+local http = socket.http
+
+local f_normal = formatters["http://%s/V?%s=%i&f=j"]
+local f_password = formatters["http://%s/L?w=%s"]
+
+local function fetch(url,password,what,i)
+ local url = f_normal(url,what,i)
+ local data, h = http.request(url)
+ local result = data and utilities.json.tolua(data)
+ return result
+end
+
+-- "123" " 1,234"
+
+local tovalue = lpeg.Cs((lpeg.R("09") + lpeg.P(1)/"")^1) / tonumber
+
+-- "2013-11-12T06:40:00"
+
+local totime = (lpeg.C(4) / tonumber) * lpeg.P("-")
+ * (lpeg.C(2) / tonumber) * lpeg.P("-")
+ * (lpeg.C(2) / tonumber) * lpeg.P("T")
+ * (lpeg.C(2) / tonumber) * lpeg.P(":")
+ * (lpeg.C(2) / tonumber) * lpeg.P(":")
+ * (lpeg.C(2) / tonumber)
+
+local function get(url,password,what,i,data,average,variant)
+ if not data then
+ data = { }
+ end
+ while true do
+ local d = fetch(url,password,what,i)
+ if d and next(d) then
+ local c_year, c_month, c_day, c_hour, c_minute, c_seconds = lpegmatch(totime,d.tm)
+ if c_year and c_seconds then
+ local delta = tonumber(d.dt)
+ local tnum = os.time { year = c_year, month = c_month, day = c_day, hour = c_hour, min = c_minute } -- os.time wants 'min', not 'minute'
+ local v = d.val
+ for i=1,#v do
+ local newvalue = lpegmatch(tovalue,v[i])
+ if newvalue then
+ local t = tnum + (i-1)*delta
+ local current = os.date("%Y-%m-%dT%H:%M:%S",t)
+ local c_year, c_month, c_day, c_hour, c_minute, c_seconds = lpegmatch(totime,current)
+ if c_year and c_seconds then
+ local years = data.years if not years then years = { } data.years = years end
+ local d_year = years[c_year] if not d_year then d_year = { } years[c_year] = d_year end
+ local months = d_year.months if not months then months = { } d_year.months = months end
+ local d_month = months[c_month] if not d_month then d_month = { } months[c_month] = d_month end
+ local days = d_month.days if not days then days = { } d_month.days = days end
+ local d_day = days[c_day] if not d_day then d_day = { } days[c_day] = d_day end
+ if average then
+ d_day.average = newvalue
+ else
+ local hours = d_day.hours if not hours then hours = { } d_day.hours = hours end
+ local d_hour = hours[c_hour] if not d_hour then d_hour = { } hours[c_hour] = d_hour end
+ d_hour[c_minute] = newvalue
+ end
+ end
+ end
+ end
+ end
+ else
+ return data
+ end
+ i = i + 1
+ end
+ return data
+end
+
+-- day of month (kwh)
+-- url = http://192.168.1.14/V?m=2
+-- m = the number of month (jan = 1, feb = 2, ..., dec = 12)
+
+-- hour of day (watt)
+-- url = http://192.168.1.14/V?d=1
+-- d = the number of days ago (today = 0, yesterday = 1, etc.)
+
+-- 10 minutes (watt)
+-- url = http://192.168.1.14/V?w=1
+-- w = 1 for the interval now till 8 hours ago.
+-- w = 2 for the interval 8 till 16 hours ago.
+-- w = 3 for the interval 16 till 24 hours ago.
+
+-- 1 minute (watt)
+-- url = http://192.168.1.14/V?h=1
+-- h = 1 for the interval now till 30 minutes ago.
+-- h = 2 for the interval 30 till 60 minutes ago
+
+function youless.collect(specification)
+ if type(specification) ~= "table" then
+ return
+ end
+ local host = specification.host or ""
+ local data = specification.data or { }
+ local filename = specification.filename or ""
+ local variant = specification.variant or "kwh"
+ local detail = specification.detail or false
+ local nobackup = specification.nobackup or false
+ local password = specification.password or ""
+ if host == "" then
+ return
+ end
+ if filename == "" then
+ return
+ else
+ data = table.load(filename) or data
+ end
+ if variant == "kwh" then
+ get(host,password,"m",1,data,true)
+ elseif variant == "watt" then
+ get(host,password,"d",0,data,true)
+ get(host,password,"w",1,data)
+ if detail then
+ get(host,password,"h",1,data) -- todo: get this for calculating the precise max
+ end
+ else
+ return
+ end
+ local path = file.dirname(filename)
+ local base = file.basename(filename)
+ data.variant = variant
+ data.host = host
+ data.updated = os.now()
+ if nobackup then
+ -- saved but with checking
+ local tempname = file.join(path,"youless.tmp")
+ table.save(tempname,data)
+ local check = table.load(tempname)
+ if type(check) == "table" then
+ local keepname = file.replacesuffix(filename,"old")
+ os.remove(keepname)
+ if not lfs.isfile(keepname) then
+ os.rename(filename,keepname)
+ os.rename(tempname,filename)
+ end
+ end
+ else
+ local keepname = file.join(path,formatters["%s-%s"](os.date("%Y-%m-%d-%H-%M-%S",os.time()),base))
+ os.rename(filename,keepname)
+ if not lfs.isfile(filename) then
+ table.save(filename,data)
+ end
+ end
+ return data
+end
+
+-- local data = youless.collect {
+-- host = "192.168.2.50",
+-- variant = "watt",
+-- filename = "youless-watt.lua"
+-- }
+
+-- inspect(data)
+
+-- local data = youless.collect {
+-- host = "192.168.2.50",
+-- variant = "kwh",
+-- filename = "youless-kwh.lua"
+-- }
+
+-- inspect(data)
+
+function youless.analyze(data)
+ if data and data.variant == "watt" and data.years then
+ for y, year in next, data.years do
+ local a_year, n_year, m_year = 0, 0, 0
+ if year.months then
+ for m, month in next, year.months do
+ local a_month, n_month = 0, 0
+ if month.days then
+ for d, day in next, month.days do
+ local a_day, n_day = 0, 0
+ if day.hours then
+ for h, hour in next, day.hours do
+ local a_hour, n_hour, m_hour = 0, 0, 0
+ for k, v in next, hour do
+ if type(k) == "number" then
+ a_hour = a_hour + v
+ n_hour = n_hour + 1
+ if v > m_hour then
+ m_hour = v
+ end
+ end
+ end
+ n_day = n_day + n_hour
+ a_day = a_day + a_hour
+ hour.maxwatt = m_hour
+ hour.watt = a_hour / n_hour
+ if m_hour > m_year then
+ m_year = m_hour
+ end
+ end
+ end
+ if n_day > 0 then
+ a_month = a_month + a_day
+ n_month = n_month + n_day
+ day.watt = a_day / n_day
+ else
+ day.watt = 0
+ end
+ end
+ end
+ if n_month > 0 then
+ a_year = a_year + a_month
+ n_year = n_year + n_month
+ month.watt = a_month / n_month
+ else
+ month.watt = 0
+ end
+ end
+ end
+ if n_year > 0 then
+ year.watt = a_year / n_year
+ year.maxwatt = m_year
+ else
+ year.watt = 0
+ year.maxwatt = 0
+ end
+ end
+ end
+end
diff --git a/Master/texmf-dist/tex/context/base/x-asciimath.lua b/Master/texmf-dist/tex/context/base/x-asciimath.lua
index 5ef741ce383..992c37eaebd 100644
--- a/Master/texmf-dist/tex/context/base/x-asciimath.lua
+++ b/Master/texmf-dist/tex/context/base/x-asciimath.lua
@@ -140,6 +140,8 @@ local reserved = {
}
+table.setmetatableindex(reserved,characters.entities)
+
local postmapper = Cs ( (
P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") +
diff --git a/Master/texmf-dist/tex/context/base/x-mathml.lua b/Master/texmf-dist/tex/context/base/x-mathml.lua
index 31483bbeabf..baf839ad8ae 100644
--- a/Master/texmf-dist/tex/context/base/x-mathml.lua
+++ b/Master/texmf-dist/tex/context/base/x-mathml.lua
@@ -62,6 +62,7 @@ local o_replacements = { -- in main table
["{"] = "\\mmlleftdelimiter \\lbrace",
["}"] = "\\mmlrightdelimiter\\rbrace",
["|"] = "\\mmlleftorrightdelimiter\\vert",
+ ["/"] = "\\mmlleftorrightdelimiter\\solidus",
[doublebar] = "\\mmlleftorrightdelimiter\\Vert",
["("] = "\\mmlleftdelimiter(",
[")"] = "\\mmlrightdelimiter)",
@@ -81,8 +82,9 @@ local o_replacements = { -- in main table
-- [utfchar(0xF103C)] = "\\mmlleftdelimiter<",
[utfchar(0xF1026)] = "\\mmlchar{38}",
+ [utfchar(0x02061)] = "", -- function application (U+2061) sometimes shows up in the font
-- [utfchar(0xF103E)] = "\\mmlleftdelimiter>",
-
+ -- [utfchar(0x000AF)] = '\\mmlchar{"203E}', -- 0x203E
}
local simpleoperatorremapper = utf.remapper(o_replacements)
@@ -90,62 +92,62 @@ local simpleoperatorremapper = utf.remapper(o_replacements)
--~ languages.data.labels.functions
local i_replacements = {
- ["sin"] = "\\mathopnolimits{sin}",
- ["cos"] = "\\mathopnolimits{cos}",
- ["abs"] = "\\mathopnolimits{abs}",
- ["arg"] = "\\mathopnolimits{arg}",
- ["codomain"] = "\\mathopnolimits{codomain}",
- ["curl"] = "\\mathopnolimits{curl}",
- ["determinant"] = "\\mathopnolimits{det}",
- ["divergence"] = "\\mathopnolimits{div}",
- ["domain"] = "\\mathopnolimits{domain}",
- ["gcd"] = "\\mathopnolimits{gcd}",
- ["grad"] = "\\mathopnolimits{grad}",
- ["identity"] = "\\mathopnolimits{id}",
- ["image"] = "\\mathopnolimits{image}",
- ["lcm"] = "\\mathopnolimits{lcm}",
- ["lim"] = "\\mathopnolimits{lim}",
- ["max"] = "\\mathopnolimits{max}",
- ["median"] = "\\mathopnolimits{median}",
- ["min"] = "\\mathopnolimits{min}",
- ["mode"] = "\\mathopnolimits{mode}",
- ["mod"] = "\\mathopnolimits{mod}",
- ["polar"] = "\\mathopnolimits{Polar}",
- ["exp"] = "\\mathopnolimits{exp}",
- ["ln"] = "\\mathopnolimits{ln}",
- ["log"] = "\\mathopnolimits{log}",
- ["sin"] = "\\mathopnolimits{sin}",
- ["arcsin"] = "\\mathopnolimits{arcsin}",
- ["sinh"] = "\\mathopnolimits{sinh}",
- ["arcsinh"] = "\\mathopnolimits{arcsinh}",
- ["cos"] = "\\mathopnolimits{cos}",
- ["arccos"] = "\\mathopnolimits{arccos}",
- ["cosh"] = "\\mathopnolimits{cosh}",
- ["arccosh"] = "\\mathopnolimits{arccosh}",
- ["tan"] = "\\mathopnolimits{tan}",
- ["arctan"] = "\\mathopnolimits{arctan}",
- ["tanh"] = "\\mathopnolimits{tanh}",
- ["arctanh"] = "\\mathopnolimits{arctanh}",
- ["cot"] = "\\mathopnolimits{cot}",
- ["arccot"] = "\\mathopnolimits{arccot}",
- ["coth"] = "\\mathopnolimits{coth}",
- ["arccoth"] = "\\mathopnolimits{arccoth}",
- ["csc"] = "\\mathopnolimits{csc}",
- ["arccsc"] = "\\mathopnolimits{arccsc}",
- ["csch"] = "\\mathopnolimits{csch}",
- ["arccsch"] = "\\mathopnolimits{arccsch}",
- ["sec"] = "\\mathopnolimits{sec}",
- ["arcsec"] = "\\mathopnolimits{arcsec}",
- ["sech"] = "\\mathopnolimits{sech}",
- ["arcsech"] = "\\mathopnolimits{arcsech}",
+ ["sin"] = "\\sin",
+ ["cos"] = "\\cos",
+ ["abs"] = "\\abs",
+ ["arg"] = "\\arg",
+ ["codomain"] = "\\codomain",
+ ["curl"] = "\\curl",
+ ["determinant"] = "\\det",
+ ["divergence"] = "\\div",
+ ["domain"] = "\\domain",
+ ["gcd"] = "\\gcd",
+ ["grad"] = "\\grad",
+ ["identity"] = "\\id",
+ ["image"] = "\\image",
+ ["lcm"] = "\\lcm",
+ ["lim"] = "\\lim",
+ ["max"] = "\\max",
+ ["median"] = "\\median",
+ ["min"] = "\\min",
+ ["mode"] = "\\mode",
+ ["mod"] = "\\mod",
+ ["polar"] = "\\Polar",
+ ["exp"] = "\\exp",
+ ["ln"] = "\\ln",
+ ["log"] = "\\log",
+ ["sin"] = "\\sin",
+ ["arcsin"] = "\\arcsin",
+ ["sinh"] = "\\sinh",
+ ["arcsinh"] = "\\arcsinh",
+ ["cos"] = "\\cos",
+ ["arccos"] = "\\arccos",
+ ["cosh"] = "\\cosh",
+ ["arccosh"] = "\\arccosh",
+ ["tan"] = "\\tan",
+ ["arctan"] = "\\arctan",
+ ["tanh"] = "\\tanh",
+ ["arctanh"] = "\\arctanh",
+ ["cot"] = "\\cot",
+ ["arccot"] = "\\arccot",
+ ["coth"] = "\\coth",
+ ["arccoth"] = "\\arccoth",
+ ["csc"] = "\\csc",
+ ["arccsc"] = "\\arccsc",
+ ["csch"] = "\\csch",
+ ["arccsch"] = "\\arccsch",
+ ["sec"] = "\\sec",
+ ["arcsec"] = "\\arcsec",
+ ["sech"] = "\\sech",
+ ["arcsech"] = "\\arcsech",
[" "] = "",
- ["false"] = "{\\mr false}",
- ["notanumber"] = "{\\mr NaN}",
- ["otherwise"] = "{\\mr otherwise}",
- ["true"] = "{\\mr true}",
- ["declare"] = "{\\mr declare}",
- ["as"] = "{\\mr as}",
+ ["false"] = "{\\mathrm false}",
+ ["notanumber"] = "{\\mathrm NaN}",
+ ["otherwise"] = "{\\mathrm otherwise}",
+ ["true"] = "{\\mathrm true}",
+ ["declare"] = "{\\mathrm declare}",
+ ["as"] = "{\\mathrm as}",
}
-- we could use a metatable or when accessing fallback on the
@@ -478,25 +480,30 @@ end
function mathml.mo(id)
local str = xmlcontent(getid(id)) or ""
local rep = gsub(str,"&.-;","") -- todo
- context(simpleoperatorremapper(rep))
+ context(simpleoperatorremapper(rep) or rep)
end
function mathml.mi(id)
-- we need to strip comments etc .. todo when reading in tree
local e = getid(id)
local str = e.dt
- if type(str) == "string" then
+ if type(str) == "table" then
local n = #str
if n == 0 then
-- nothing to do
elseif n == 1 then
- local str = gsub(str[1],"&.-;","") -- bah
- local rep = i_replacements[str]
- if not rep then
- rep = gsub(str,".",i_replacements)
+ local first = str[1]
+ if type(first) == "string" then
+ local str = gsub(first,"&.-;","") -- bah
+ local rep = i_replacements[str]
+ if not rep then
+ rep = gsub(str,".",i_replacements)
+ end
+ context(rep)
+ -- context.mi(rep)
+ else
+ context.xmlflush(id) -- xmlsprint or so
end
- context(rep)
- -- context.mi(rep)
else
context.xmlflush(id) -- xmlsprint or so
end
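(For reference, the lookup strategy in mathml.mi above is: try the whole identifier in i_replacements first, so "arcsinh" becomes \arcsinh, and only fall back to per-character replacement when the identifier is unknown. A hedged standalone sketch of that two-stage lookup, reusing the i_replacements table shown earlier:)

local function identifier_to_tex(str)
    local rep = i_replacements[str]
    if not rep then
        -- gsub with a table argument: unknown characters fall through unchanged
        rep = string.gsub(str, ".", i_replacements)
    end
    return rep
end

-- identifier_to_tex("lim") --> "\lim"
-- identifier_to_tex("f")   --> "f" (no entry, the per-character pass leaves it alone)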
@@ -827,3 +834,13 @@ function mathml.cpolar_a(root)
end
context.right(false,")")
end
+
+-- crap .. maybe in char-def a mathml overload
+
+local mathmleq = {
+ [utfchar(0x00AF)] = utfchar(0x203E),
+}
+
+function mathml.extensible(chr)
+ context(mathmleq[chr] or chr)
+end
diff --git a/Master/texmf-dist/tex/context/base/x-mathml.mkiv b/Master/texmf-dist/tex/context/base/x-mathml.mkiv
index a4eb0946740..5520dbbe6fc 100644
--- a/Master/texmf-dist/tex/context/base/x-mathml.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-mathml.mkiv
@@ -1,4 +1,4 @@
-%D \module
+%D \module
%D [ file=x-mathml,
%D version=2008.05.29,
%D title=\CONTEXT\ XML Modules,
@@ -74,17 +74,30 @@
}
}
{
- \MMLhack\xmlflush{#1}
+ \math_fences_checked_start
+ \MMLhack
+ \xmlflush{#1}
+ \math_fences_checked_stop
}
\endgroup
\stopxmlsetups
\startxmlsetups mml:imath
- \inlinemathematics{\MMLhack\xmlflush{#1}}
+ \inlinemathematics {
+ \math_fences_checked_start
+ \MMLhack
+ \xmlflush{#1}
+ \math_fences_checked_stop
+ }
\stopxmlsetups
\startxmlsetups mml:dmath
- \displaymathematics{\MMLhack\xmlflush{#1}}
+ \displaymathematics {
+ \math_fences_checked_start
+ \MMLhack
+ \xmlflush{#1}
+ \math_fences_checked_stop
+ }
\stopxmlsetups
%D First we define some general formula elements.
@@ -96,24 +109,45 @@
\startformula\MMLhack\xmlfirst{#1}{/mml:math}\stopformula
\stopxmlsetups
-\setfalse\mmlignoredelimiter
-\settrue \mmlsomeleftdelimiter
+% old delimiter hacks
+%
+% \setfalse\mmlignoredelimiter
+% \settrue \mmlsomeleftdelimiter
+%
+% \def\MMLleftorright
+% {\ifconditional\mmlsomeleftdelimiter
+% \setfalse\mmlsomeleftdelimiter\expandafter\MMLleft
+% \else
+% \settrue \mmlsomeleftdelimiter\expandafter\MMLright
+% \fi}
+%
+% \ifx\MMLleft \undefined \let\MMLleft \firstofoneargument \fi
+% \ifx\MMLright \undefined \let\MMLright \firstofoneargument \fi
+% \ifx\MMLmiddle\undefined \let\MMLmiddle\firstofoneargument \fi
+%
+% \def\mmlleftdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleft #1}\fi}
+% \def\mmlrightdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLright #1}\fi}
+% \def\mmlmiddledelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLmiddle #1}\fi}
+% \def\mmlleftorrightdelimiter#1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleftorright#1}\fi}
-\def\MMLleftorright
- {\ifconditional\mmlsomeleftdelimiter
- \setfalse\mmlsomeleftdelimiter\expandafter\MMLleft
- \else
- \settrue \mmlsomeleftdelimiter\expandafter\MMLright
- \fi}
+% new delimiter hacks (assumes wrapping)
+%
+% \math_fences_checked_start
+% \math_fences_checked_stop
+%
+% \math_fences_checked_left
+% \math_fences_checked_middle
+% \math_fences_checked_right
+% \math_fences_checked_left_or_right
-\ifx\MMLleft \undefined \let\MMLleft \firstofoneargument \fi
-\ifx\MMLright \undefined \let\MMLright \firstofoneargument \fi
-\ifx\MMLmiddle\undefined \let\MMLmiddle\firstofoneargument \fi
+\setfalse\mmlignoredelimiter % alternatively we could turn it on/off inside the start/stop and ignore \left\right\middle otherwise
-\def\mmlleftdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleft #1}\fi}
-\def\mmlrightdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLright #1}\fi}
-\def\mmlmiddledelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLmiddle #1}\fi}
-\def\mmlleftorrightdelimiter#1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleftorright#1}\fi}
+\def\mmlleftdelimiter {\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_left \fi}
+\def\mmlrightdelimiter {\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_right \fi}
+\def\mmlmiddledelimiter {\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_middle \fi}
+\def\mmlleftorrightdelimiter{\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_left_or_right\fi}
+
+% end of delimiter mess
\def\mmlchar#1{\char#1 } % used in lua code
@@ -201,6 +235,10 @@
\def\mmlprelast#1{\xmlelement{#1}{-2}}
\def\mmllast #1{\xmlelement{#1}{-1}}
+\unexpanded\def\mmlunexpandedfirst #1{\xmlelement{#1}{1}} % we can move these inline if needed
+\unexpanded\def\mmlunexpandedsecond #1{\xmlelement{#1}{2}}
+\unexpanded\def\mmlunexpandedthird #1{\xmlelement{#1}{3}}
+
\starttexdefinition doifelsemmlfunction #1
\xmldoifelse {#1} {/mml:fn} {
\firstoftwoarguments
@@ -217,19 +255,6 @@
}
\stoptexdefinition
-%D Special features:
-
- \newtoks \@@postponedMMLactions \setfalse \somepostponedMMLactions
-
- \def\postponeMMLactions#1%
- {\global\settrue\somepostponedMMLactions
- \global\@@postponedMMLactions\expandafter{\the\@@postponedMMLactions#1}}
-
- \def\postponedMMLactions
- {\global\setfalse\somepostponedMMLactions
- \@EA\global\@EA\@@postponedMMLactions\@EA\emptytoks
- \the\@@postponedMMLactions}
-
%D A couple of lists:
\convertargument
@@ -301,6 +326,17 @@
\newcount\mmlapplydepth \def\MMLcreset{\mmlapplydepth\zerocount}
+% \newtoks \@@postponedMMLactions \setfalse \somepostponedMMLactions
+%
+% \def\postponeMMLactions#1%
+% {\global\settrue\somepostponedMMLactions
+% \global\@@postponedMMLactions\expandafter{\the\@@postponedMMLactions#1}}
+%
+% \def\postponedMMLactions
+% {\global\setfalse\somepostponedMMLactions
+% \@EA\global\@EA\@@postponedMMLactions\@EA\emptytoks
+% \the\@@postponedMMLactions}
+
\startxmlsetups mml:apply
\MMLmathinner {
\xmldoif {#1} {/(\MMLcmainresetlist\string|\MMLctempresetlist)} {
@@ -1785,25 +1821,23 @@
\stopxmlsetups
% PRESENTATION MATHML
-%
-% there are some rough edges that need to be sorted out
-
-% helpers
-
-\xmlmapvalue {mml} {normal} {\tf}
-\xmlmapvalue {mml} {double-struck} {\bf}
-\xmlmapvalue {mml} {italic} {\it}
-\xmlmapvalue {mml} {fraktur} {\bf}
-\xmlmapvalue {mml} {script} {\tf}
-\xmlmapvalue {mml} {bold} {\bf}
-\xmlmapvalue {mml} {bold-italic} {\bi}
-\xmlmapvalue {mml} {bold-fraktur} {\bf}
-\xmlmapvalue {mml} {bold-script} {\bf}
-\xmlmapvalue {mml} {sans-serif} {\ss}
-\xmlmapvalue {mml} {bold-sans-serif} {\ss\bf}
-\xmlmapvalue {mml} {sans-serif-italic} {\ss\it}
-\xmlmapvalue {mml} {sans-serif-bold-italic} {\ss\bi}
-\xmlmapvalue {mml} {monospace} {\tt}
+
+% helpers: maybe we need a setting for the uprights
+
+\xmlmapvalue {mml} {normal} {\mathupright} % {\mathtf}
+\xmlmapvalue {mml} {double-struck} {\mathblackboard}
+\xmlmapvalue {mml} {italic} {\mathit}
+\xmlmapvalue {mml} {fraktur} {\mathfraktur}
+\xmlmapvalue {mml} {script} {\mathscript}
+\xmlmapvalue {mml} {bold} {\mb} % {\mathbf}
+\xmlmapvalue {mml} {bold-italic} {\mathbi}
+\xmlmapvalue {mml} {bold-fraktur} {\mathfraktur\mathbf}
+\xmlmapvalue {mml} {bold-script} {\mathscript\mathbf}
+\xmlmapvalue {mml} {sans-serif} {\mathss}
+\xmlmapvalue {mml} {bold-sans-serif} {\mathss\mathbf}
+\xmlmapvalue {mml} {sans-serif-italic} {\mathss\mathit}
+\xmlmapvalue {mml} {sans-serif-bold-italic} {\mathss\mathbi}
+\xmlmapvalue {mml} {monospace} {\mathtt}
% todo: displaystyle=true/false (or whatever else shows up)
@@ -1863,15 +1897,15 @@
% setups
-\startxmlsetups mml:mi % todo: mathvariant mathsize mathcolor mathbackground
- \ctxmodulemathml{mi("#1")}
+\startxmlsetups mml:mi % todo: mathsize (unlikely) mathcolor (easy) mathbackground (easy)
+ \begingroup
+ \setmmlmathstyle{#1}
+ \ctxmodulemathml{mi("#1")}
+ \endgroup
\stopxmlsetups
-\startxmlsetups mml:mn % todo: mathvariant mathsize mathcolor mathbackground
-% \begingroup
-% \mr
- \ctxmodulemathml{mn("#1")}% no \hbox, would be ok for . , but spoils rest
-% \endgroup
+\startxmlsetups mml:mn
+ \ctxmodulemathml{mn("#1")}% no \hbox, would be ok for . , but spoils rest
\stopxmlsetups
% -2 and 1-2
@@ -1885,13 +1919,20 @@
\setfalse\mmlignoredelimiter
\stopxmlsetups
+% \startxmlsetups mml:mfenced % {} around separator is needed for spacing
+% \def\MMLleft {\left }% weird
+% \def\MMLright {\right}
+% \def\MMLmiddle{\middle}
+% \ctxmodulemathml{mfenced("#1")}
+% \stopxmlsetups
+
\startxmlsetups mml:mfenced % {} around separator is needed for spacing
- \def\MMLleft {\left }% weird
- \def\MMLright {\right}
- \def\MMLmiddle{\middle}
+ \math_fences_checked_start
\ctxmodulemathml{mfenced("#1")}
+ \math_fences_checked_stop
\stopxmlsetups
+
\defineoverlay [mml:enclose:box] [\useMPgraphic{mml:enclose:box}]
\defineoverlay [mml:enclose:roundedbox] [\useMPgraphic{mml:enclose:roundedbox}]
\defineoverlay [mml:enclose:circle] [\useMPgraphic{mml:enclose:circle}]
@@ -2103,18 +2144,32 @@
%
% fails on { ... so we need
+% \startxmlsetups mml:mrow
+% \begingroup
+% \xmldoifelse {#1} {/mml:mo[first() or last()]} {% we need a {}
+% \def\MMLleft {\left }
+% \def\MMLright {\right}
+% \def\MMLmiddle{\middle}
+% \enabledelimiter
+% \checkdelimiters{\xmlall{#1}{/mml:mo}}
+% \fakeleftdelimiter
+% \xmlflush{#1}
+% \fakerightdelimiter
+% \disabledelimiter
+% } {
+% \xmlflush{#1}
+% }
+% \endgroup
+% \stopxmlsetups
+%
+% more modern:
+
\startxmlsetups mml:mrow
\begingroup
\xmldoifelse {#1} {/mml:mo[first() or last()]} {% we need a {}
- \def\MMLleft {\left }
- \def\MMLright {\right}
- \def\MMLmiddle{\middle}
- \enabledelimiter
- \checkdelimiters{\xmlall{#1}{/mml:mo}}
- \fakeleftdelimiter
- \xmlflush{#1}
- \fakerightdelimiter
- \disabledelimiter
+ \math_fences_checked_start
+ \xmlflush{#1}
+ \math_fences_checked_stop
} {
\xmlflush{#1}
}
@@ -2134,6 +2189,8 @@
% brrr no { } when limop .. todo: better in lua
% speed up with ifx and setups or just in lua
+\let\mmlnucleus\relax
+
\startxmlsetups mml:msub
\edef\mmlnucleus{\xmlraw{#1}{/mml:*[1]}}
\doifelse {\utfmathclass\mmlnucleus} {limop} {
@@ -2183,65 +2240,134 @@
\fi\fi
{\csname#1\endcsname}}
+% todo: combine topaccent/over/bottomaccent/under check
+
+\definemathextensible [\v!mathematics] [mml:overleftarrow] ["2190] % ["27F5]
+\definemathextensible [\v!mathematics] [mml:overrightarrow] ["2192] % ["27F6]
+\definemathextensible [\v!mathematics] [mml:overleftrightarrow] ["27F7]
+\definemathextensible [\v!mathematics] [mml:overtwoheadrightarrow] ["27F9]
+\definemathextensible [\v!mathematics] [mml:overleftharpoondown] ["21BD]
+\definemathextensible [\v!mathematics] [mml:overleftharpoonup] ["21BC]
+\definemathextensible [\v!mathematics] [mml:overrightharpoondown] ["21C1]
+\definemathextensible [\v!mathematics] [mml:overrightharpoonup] ["21C0]
+
+\definemathextensible [\v!mathematics] [mml:underleftarrow] ["2190] % ["27F5]
+\definemathextensible [\v!mathematics] [mml:underrightarrow] ["2192] % ["27F6]
+\definemathextensible [\v!mathematics] [mml:underleftrightarrow] ["27F7]
+\definemathextensible [\v!mathematics] [mml:undertwoheadrightarrow] ["27F9]
+\definemathextensible [\v!mathematics] [mml:underleftharpoondown] ["21BD]
+\definemathextensible [\v!mathematics] [mml:underleftharpoonup] ["21BC]
+\definemathextensible [\v!mathematics] [mml:underrightharpoondown] ["21C1]
+\definemathextensible [\v!mathematics] [mml:underrightharpoonup] ["21C0]
+
+\definemathtriplet [\v!mathematics] [mmlovertriplet]
+\definemathtriplet [\v!mathematics] [mmlundertriplet]
+\definemathtriplet [\v!mathematics] [mmldoubletriplet]
+
+% alternative:
+%
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2190}] ["2190] % ["27F5]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2192}] ["2192] % ["27F6]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F5}] ["2190] % ["27F5]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F6}] ["2192] % ["27F6]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F7}] ["27F7]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F9}] ["27F9]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BD}] ["21BD]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BC}] ["21BC]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C1}] ["21C1]
+% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C0}] ["21C0]
+
+\unexpanded\def\mmloverof#1{\mmlexecuteifdefined\mmlovercommand\relax{\mmlunexpandedfirst {#1}}\relax}
+\unexpanded\def\mmloveros#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedsecond{#1}}\relax}
+\unexpanded\def\mmloverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
+\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax}
+
\startxmlsetups mml:mover
- \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}
- \doifelseutfmathaccentfiltered\mmlovertoken{topaccent} {% not ok
- \edef\mmlovercommand{\utfmathcommandfiltered\mmlovertoken{topaccent}}
- \mmlexecuteifdefined\mmlovercommand\mathematics{\mmlfirst{#1}}
+ \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathabove\mmlovertoken {
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \mmloverof{#1}
} {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}
- \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
- \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \vbox {
- \mathsurround\zeropoint
- \ialign {
- \hss$\alignmark\alignmark$\hss
- \crcr
- \noalign{\kern3\onepoint}%
- \mmlexecuteifdefined\mmlovercommand{\mmlsecond{#1}}{}% extra {} is safeguard
- \crcr
- \noalign{\kern3\onepoint\nointerlineskip}%
- \mmlexecuteifdefined\mmlbasecommand{\mmlfirst{#1}}{}% extra {} is safeguard
- \crcr
- }
+ \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
+ \doifelseutfmathabove\mmlbasetoken {
+ \edef\mmlbasecommand{mml:\utfmathcommandabove\mmlbasetoken}
+ \mmloverbs{#1}
+ } {
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
+ \mmlundertriplet{\mmloverbf{#1}}{\mmloveros{#1}}{}%\relax
}
}
-% \limits % spoils spacing
+ % \limits % spoils spacing
\stopxmlsetups
-% messy: (_
+% alternative:
+%
+% \startxmlsetups mml:mover
+% \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
+% \doifelseutfmathabove\mmlovertoken {
+% \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+% \mmloverof{#1}
+% } {
+% \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]/text()}}
+% \ifcsname mml:\mmlbasetoken\endcsname
+% \csname mml:\mmlbasetoken\endcsname{\mmlunexpandedsecond{#1}}\relax
+% \else
+% \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+% \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
+% \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
+% \fi
+% }
+% % \limits % spoils spacing
+% \stopxmlsetups
+
+% do this in lua
+
+\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
+
+\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax}
+\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
+\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
+%unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
+\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax {\mmlunexpandedsecond{#1}}\relax}
\startxmlsetups mml:munder
-% \mathop {
- \edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}}
- \doifelseutfmathaccentfiltered\mmlundertoken{botaccent} {
- \edef\mmlundercommand{\utfmathcommandfiltered\mmlundertoken{botaccent}}
- \mmlexecuteifdefined\mmlundercommand\mathematics{\mmlfirst{#1}}
+ \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathbelow\mmlundertoken {%
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \mmlunderuf{#1}
+ } {
+ \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
+ \doifelseutfmathbelow\mmlbasetoken {
+ \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
+ \mmlunderbs{#1}
} {
- \edef\mmlbasetoken {\xmlraw{#1}{/mml:*[1]}}
\edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
\edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \vtop {
- \mathsurround\zeropoint \ialign {
- \hss$##$\hss
- \crcr
- \mmlexecuteifdefined\mmlbasecommand {\mmlfirst{#1}}
- \crcr
- \noalign{\kern3\onepoint\nointerlineskip}%
- \mmlexecuteifdefined\mmlundercommand{\mmlsecond{#1}}
- \crcr
- \noalign{\kern3\onepoint}
- }
- }
+ \mmlundertriplet{\mmlunderbf{#1}}{}{\mmlunderus{#1}}%\relax
}
-% }
-% \limits % spoils spacing
+ }
+ % \limits % spoils spacing
\stopxmlsetups
+\unexpanded\def\mmlunderoverst#1{\mmlexecuteifdefined\mmlbasecommand \relax{\mmlunexpandedsecond{#1}}{\mmlunexpandedthird{#1}}\relax}
+\unexpanded\def\mmlunderoverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
+\unexpanded\def\mmlunderoverus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
+\unexpanded\def\mmlunderoverot#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedthird {#1}}\relax}
+
\startxmlsetups mml:munderover
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}
- \edef\mmlbasecommand{\utfmathcommand\mmlbasetoken}
- \mmlexecuteifdefined\mmlbasecommand{\mathematics{\mmlfirst{#1}}}\normalsubscript{\mmlsecond{#1}}\normalsuperscript{\mmlthird{#1}}
+ \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
+ \doifelseutfmathbelow\mmlbasetoken {
+ \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
+ \mmlunderoverst{#1}
+ } {
+ \edef\mmlundertoken {\xmlraw{#1}{/mml:*[2]}}% /text()
+ \edef\mmlovertoken {\xmlraw{#1}{/mml:*[3]}}% /text()
+ \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
+ \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
+ \edef\mmlovercommand {\utfmathfiller\mmlovertoken}
+ \mmldoubletriplet{\mmlunderoverbf{#1}}{\mmlunderoverot{#1}}{\mmlunderoverus{#1}}\relax
+ }
\stopxmlsetups
% tables (mml:mtable, mml:mtr, mml:mlabledtr, mml:mtd)
diff --git a/Master/texmf-dist/tex/context/base/x-set-11.mkiv b/Master/texmf-dist/tex/context/base/x-set-11.mkiv
index a8e62542b54..12854dc92d8 100644
--- a/Master/texmf-dist/tex/context/base/x-set-11.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-set-11.mkiv
@@ -337,11 +337,11 @@
\defineregister
[texmacro]
- [texmacros]
+% [texmacros]
\definesorting
[texcommand]
- [texcommands]
+% [texcommands]
\setupsorting
[texcommand]
@@ -390,6 +390,7 @@
\startxmlsetups xml:setups:register
\xmlsetup{#1}{xml:setups:assemblename}
+ % not really needed if we just use setups
\expanded{\texcommand[stp:x:\currentSETUPfullname]{#1}}
\stopxmlsetups
@@ -404,7 +405,7 @@
\xmlregisterdocumentsetup{setups}{xml:setups:basics}
-\def\loadsetups{\complexorsimple\loadsetups}
+\unexpanded\def\loadsetups{\complexorsimple\loadsetups}
\let\loadedsetups\empty % we load more setups, setups:
@@ -416,7 +417,7 @@
{\doonlyonce{setups:#1}
{\doglobal\prependtocommalist{setups:#1}\loadedsetups % last overloads first
\xmlloadonly{setups:#1}{#1}{setups}%
- \xmlfilter{setups:#1}{interface/command/command(xml:setups:register)}}}} % qualified path saves > 50% runtime
+ \xmlfilter{setups:#1}{/interface/command/command(xml:setups:register)}}}} % qualified path saves > 50% runtime
\newif\ifshortsetup
@@ -447,8 +448,18 @@
% \def\showsetupindeed#1%
% {\xmlfilterlist{\loadedsetups}{interface/command[@name='#1']/command(xml:setups:typeset)}}
+% \def\showsetupindeed#1%
+% {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
+
+% \setelementnature[setup][display]
+% \setelementnature[setup][mixed]
+
\def\showsetupindeed#1%
- {\xmlfilterlist{\loadedsetups}{interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
+ {\startelement[setup][name=#1]%
+ \startelement[noexport][comment={setup definition #1}]
+ \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}%
+ \stopelement
+ \stopelement}
\unexpanded\def\placesetup {\placelistofsorts[texcommand][\c!criterium=\v!used]}
\unexpanded\def\placeallsetups{\placelistofsorts[texcommand][\c!criterium=\v!all ]}
@@ -475,15 +486,15 @@
\veryraggedright
\doglobal\newcounter\currentSETUPargument
\xdef\maximumSETUPargument{\xmlcount{#1}{/arguments/*}}
+ \edef\currentSETUPhash{\xmlatt{#1}{hash}}
\bgroup
\enablemode[setups-pass-one]%
\doif {\xmlatt{#1}{generated}} {yes} {
\ttsl
}
- \doifelse {\xmlatt{#1}{type}} {environment} {
- \tex{\e!start}
- } {
- \tex{}
+ \letterbackslash
+ \doif {\xmlatt{#1}{type}} {environment} {
+ \e!start
}
\xmlfilter{#1}{/sequence/first()}
\ignorespaces
@@ -530,7 +541,7 @@
\startxmlsetups xml:setups:resolve
\ignorespaces
- \xmlfilterlist{\loadedsetups}{interface/define[@name='\xmlatt{#1}{name}']/first()}
+ \xmlfilterlist{\loadedsetups}{/interface/define[@name='\xmlatt{#1}{name}']/first()}
\stopxmlsetups
%D This is the first pass; here we generate the top line.
@@ -605,7 +616,11 @@
\egroup
\startfirstSETUPcolumn{\showSETUPnumber}%
\ignorespaces
- \xmlflush{#1}
+ \xmldoifelse{#1}{/(parameter|inherit)}{
+ \xmlflush{#1}
+ } {
+ ...
+ }
\let\previousSETUPargument\currentSETUPargument
\stopfirstSETUPcolumn
\blank[\v!halfline]
@@ -631,6 +646,20 @@
\startsecondSETUPcolumn{\c!setup!reserved!{\xmlatt{#1}{name}}}{=}
\ignorespaces
\xmlflush{#1}
+ \doifmode{interface:setup:defaults} {
+ \ifx\currentSETUPhash\empty \else
+ \begingroup
+ % todo, make a one level expansion of parameter
+ \let\emwidth \relax
+ \let\exheight\relax
+ \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
+ \ifx\currentSETUPvalue\empty \else
+ =\space
+ \detokenize\expandafter{\currentSETUPvalue}
+ \fi
+ \endgroup
+ \fi
+ }
\stopsecondSETUPcolumn
\ignorespaces
\stopxmlsetups
@@ -661,7 +690,7 @@
\secondSETUPcolumn {
\c!setup!text!{\getmessage{setup}{inherits}}
\enspace
- \tex{}
+ \letterbackslash
\xmlatt{#1}{name}
} {}
\ignorespaces
@@ -747,4 +776,75 @@
\def\showSETUPword #1{\showSETUP{#1}{\leftargument...\rightargument} {\leftargument.. ... ..\rightargument}}
\def\showSETUPcontent #1{\showSETUP{#1}{\leftargument...\rightargument} {\leftargument.. ... ..\rightargument}}
+% A prelude to a rewrite and some more:
+
+\definetype[parametercommand][type]
+\definetype[parameterkey] [type]
+\definetype[parametervalue] [type][space=on]
+
+\setuptype[parametercommand] [color=darkmagenta]
+\setuptype[parametervalue] [color=darkyellow]
+
+\startxmlsetups xml:setups:parameters:value
+ \edef\currentsetupparameterkey {\xmlatt{#1}{name}}
+ \edef\currentsetupparametervalue{\csname named\currentsetupparametercategory parameter\endcsname\currentsetupparameterinstance\currentsetupparameterkey}
+ \ifx\currentsetupparameterinstance\empty
+ \expanded {
+ \NC \parameterkey {\currentsetupparameterkey}
+ \NC \parametervalue{\detokenize\expandafter{\currentsetupparametervalue}}
+ \NC \NR
+ }
+ \else\ifx\currentsetupparametervalue\empty
+ \else
+ \edef\currentsetupparameterdefault{\csname named\currentsetupparametercategory parameter\endcsname\empty\currentsetupparameterkey}
+ \ifx\currentsetupparametervalue\currentsetupparameterdefault
+ % skip
+ \else
+ \expanded {
+ \NC \parameterkey {\currentsetupparameterkey}
+ \NC \parametervalue{\detokenize\expandafter{\currentsetupparametervalue}}
+ \NC \NR
+ }
+ \fi
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups xml:setups:parameters:values
+ \blank[big]
+ \expanded {
+ \parametercommand {
+ \currentsetupparametercommand
+ \space:\space
+ \ifx\currentsetupparameterinstance\empty
+ defaults
+ \else
+ \currentsetupparameterinstance
+ \fi
+ }
+ }
+ \blank[big,samepage]
+ \starttabulate[|l|p|]
+ \xmlall
+ {#1}
+ {/interface/command[@name=='\currentsetupparametercommand']/arguments/assignments/parameter/command(xml:setups:parameters:value)}
+ \ifnum\noftabulaterows = \zerocount
+ \NC \parameterkey{no specific settings} \NC \NC \NR
+ \fi
+ \stoptabulate
+\stopxmlsetups
+
+\starttexdefinition showrootvalues [#1]
+ \edef\currentsetupparametercategory{#1}
+ \edef\currentsetupparametercommand{setup#1}
+ \let\currentsetupparameterinstance\empty
+ \xmlsetup{\loadedsetups}{xml:setups:parameters:values}
+\stoptexdefinition
+
+\starttexdefinition showinstancevalues [#1]#2[#3]
+ \edef\currentsetupparametercategory{#1}
+ \edef\currentsetupparametercommand{setup#1}
+ \edef\currentsetupparameterinstance{#3}
+ \xmlsetup{\loadedsetups}{xml:setups:parameters:values}
+\stoptexdefinition
+
\protect \endinput
diff --git a/Master/texmf-dist/tex/context/base/x-set-12.mkiv b/Master/texmf-dist/tex/context/base/x-set-12.mkiv
index bfeb0ab54aa..6590bfe9e03 100644
--- a/Master/texmf-dist/tex/context/base/x-set-12.mkiv
+++ b/Master/texmf-dist/tex/context/base/x-set-12.mkiv
@@ -12,6 +12,19 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% included loading overhead
+%
+% 2.55 / 2.40 (luatex)
+% 1.90 / 1.80 (luajittex)
+
+% \newif\ifcachedcommand
+% \newif\ifcalledcommand
+%
+% \cachedcommandtrue
+% \calledcommandtrue
+%
+% \usemodule[speedtest]
+
\usemodule[set-11]
\unprotect
diff --git a/Master/texmf-dist/tex/context/fonts/ebgaramond.lfg b/Master/texmf-dist/tex/context/fonts/ebgaramond.lfg
new file mode 100644
index 00000000000..43cc13c51c6
--- /dev/null
+++ b/Master/texmf-dist/tex/context/fonts/ebgaramond.lfg
@@ -0,0 +1,53 @@
+return {
+ name = "eb garamond",
+ version = "1.00",
+ comment = "Goodies that complement eb garamond.",
+ author = "Hans Hagen",
+ copyright = "ConTeXt development team",
+ designsizes = {
+ ["EBGaramond-Italic"] = {
+ ["8pt"] = "file:EBGaramond08-Italic",
+ ["9pt"] = "file:EBGaramond08-Italic",
+ ["9.5pt"] = "file:EBGaramond08-Italic",
+ ["10pt"] = "file:EBGaramond12-Italic",
+ ["11pt"] = "file:EBGaramond12-Italic",
+ ["12pt"] = "file:EBGaramond12-Italic",
+ default = "file:EBGaramond12-Italic",
+ },
+ ["EBGaramond-Regular"] = {
+ ["8pt"] = "file:EBGaramond08-Regular",
+ ["9pt"] = "file:EBGaramond08-Regular",
+ ["9.5pt"] = "file:EBGaramond08-Regular",
+ ["10pt"] = "file:EBGaramond12-Regular",
+ ["11pt"] = "file:EBGaramond12-Regular",
+ ["12pt"] = "file:EBGaramond12-Regular",
+ default = "file:EBGaramond12-Regular",
+ },
+ ["EBGaramond-SC"] = {
+ ["8pt"] = "file:EBGaramond08-SC",
+ ["9pt"] = "file:EBGaramond08-SC",
+ ["9.5pt"] = "file:EBGaramond08-SC",
+ ["10pt"] = "file:EBGaramond12-SC",
+ ["11pt"] = "file:EBGaramond12-SC",
+ ["12pt"] = "file:EBGaramond12-SC",
+ default = "file:EBGaramond12-SC",
+ },
+ ["EBGaramond-Bold"] = {
+ default = "file:EBGaramond12-Bold",
+ },
+ ["EBGaramond-AllSC"] = {
+ default = "file:EBGaramond12-AllSC",
+ },
+ ["EBGaramond-Initials"] = {
+ default = "file:EBGaramondInitials",
+ },
+ ["EBGaramond-InitialsF1"] = {
+ default = "file:EBGaramondInitialsF1",
+ },
+ ["EBGaramond-InitialsF2"] = {
+ default = "file:EBGaramondInitialsF2",
+ },
+ }
+}
+
+
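(The designsizes table above maps a requested body font size to an optical master. A hedged sketch of the lookup such a table supports; the goodie loading itself is handled elsewhere in ConTeXt, and "goodie" below simply stands for the table returned above:)

-- pick the file for a given design size, falling back to the default entry
local function resolvedesignsize(designsizes, fontname, size)
    local sizes = designsizes[fontname]
    if sizes then
        return sizes[size] or sizes.default
    end
end

-- resolvedesignsize(goodie.designsizes, "EBGaramond-Regular", "9.5pt")
--   --> "file:EBGaramond08-Regular"
-- resolvedesignsize(goodie.designsizes, "EBGaramond-Regular", "14pt")
--   --> "file:EBGaramond12-Regular" (the default entry)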
diff --git a/Master/texmf-dist/tex/context/fonts/euler-math.lfg b/Master/texmf-dist/tex/context/fonts/euler-math.lfg
new file mode 100644
index 00000000000..da7647c5339
--- /dev/null
+++ b/Master/texmf-dist/tex/context/fonts/euler-math.lfg
@@ -0,0 +1,23 @@
+-- this file might go away and is for experiments only
+
+return {
+ name = "euler-math",
+ version = "1.00",
+ comment = "Goodies that complement euler math.",
+ author = "Hans Hagen",
+ copyright = "ConTeXt development team",
+ mathematics = {
+ -- virtuals = {
+ -- ["euler-nova"] = {
+ -- { name = "texgyrepagella-math.otf", main = true, parameters = true },
+ -- { name = "euler.otf", overlay = true }, -- first = 0x1234, last = 0x1256
+ --
+ -- -- { name = "euler.otf", main = true, parameters = true },
+ -- -- { name = "texgyrepagella-math.otf", overlay = true },
+ --
+ -- },
+ -- }
+ }
+}
+
+
diff --git a/Master/texmf-dist/tex/context/fonts/hanbatanglvt.lfg b/Master/texmf-dist/tex/context/fonts/hanbatanglvt.lfg
new file mode 100644
index 00000000000..33374334857
--- /dev/null
+++ b/Master/texmf-dist/tex/context/fonts/hanbatanglvt.lfg
@@ -0,0 +1,30 @@
+-- Maybe some day I will do this more efficiently but for the moment it's okay. (We need
+-- access to the names table then.)
+
+local f_uni_base = string.formatters["uni%04X"]
+local f_uni_plus = string.formatters["uni%04X.y%s"]
+
+local function range(first,last)
+ local t = { }
+ for i=first,last do
+ t[#t+1] = f_uni_base(i)
+ for j=0,19 do
+ t[#t+1] = f_uni_plus(i,j)
+ end
+ end
+ return t
+end
+
+return {
+ name = "hanbatanglvt",
+ version = "1.00",
+ comment = "Goodies that complement the hanbatanglvt fonts.",
+ author = "Hans Hagen",
+ colorschemes = {
+ default = {
+ range(0x01100,0x0115F), -- jamo_initial (r/c)
+ range(0x01160,0x011A7), -- jamo_medial (g/m)
+ range(0x011A8,0x011FF), -- jamo_final (b/y)
+ }
+ }
+}
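(A self-contained version of the range helper above, using plain string.format instead of ConTeXt's string.formatters, shows which glyph names the colorscheme ends up covering:)

local function range(first, last)
    local t = { }
    for i = first, last do
        t[#t+1] = string.format("uni%04X", i)
        for j = 0, 19 do
            -- the .y0 .. .y19 suffixes follow the naming used in the goodie above
            t[#t+1] = string.format("uni%04X.y%s", i, j)
        end
    end
    return t
end

print(table.concat(range(0x1100, 0x1100), " "))
-- uni1100 uni1100.y0 uni1100.y1 ... uni1100.y19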
diff --git a/Master/texmf-dist/tex/context/fonts/lm.lfg b/Master/texmf-dist/tex/context/fonts/lm.lfg
index 792e723e82b..8d761471848 100644
--- a/Master/texmf-dist/tex/context/fonts/lm.lfg
+++ b/Master/texmf-dist/tex/context/fonts/lm.lfg
@@ -1,4 +1,4 @@
--- In order to be ale to use beta math fonts, we use our own file name and
+-- In order to be able to use beta math fonts, we use our own file name and
-- always remap.
return {
@@ -11,12 +11,13 @@ return {
tweaks = {
aftercopying = {
mathematics.tweaks.fixbadprime, -- prime is too low
+ -- mathematics.tweaks.fixoverline,
},
},
dimensions = {
-- always applied
--- default = {
--- },
+ -- default = {
+ -- },
-- driven by 'mathdimensions' feature
signs = {
-- set dimensions
diff --git a/Master/texmf-dist/tex/context/fonts/mdbch-math.lfg b/Master/texmf-dist/tex/context/fonts/mdbch-math.lfg
index f4ce245ffb8..c66a3575a22 100644
--- a/Master/texmf-dist/tex/context/fonts/mdbch-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/mdbch-math.lfg
@@ -1,37 +1,37 @@
-return {
- name = "mdbch-math",
- version = "1.00",
- comment = "Math fonts that complement ITC Charter.",
- author = "Hans, Mojca, Aditya",
- copyright = "ConTeXt development team",
- mathematics = {
- mapfiles = {
- "mdbch.map",
- "mkiv-base.map",
- },
- virtuals = {
- ["mdbch-rm"] = {
- { name = "file:bchr8a", features = "virtualmath", main = true },
- { name = "mdbchr7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdbchri7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdbchri7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdbchb7m", vector = "tex-bf", skewchar=0x7F },
- { name = "mdbchbi7m", vector = "tex-bi", skewchar=0x7F },
- { name = "mdbchr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdbchr7v", vector = "tex-ex", extension = true },
- { name = "mdbchrma", vector = "tex-ma" },
- { name = "mdbchrmb", vector = "tex-mb" },
- },
- ["mdbch-bf"] = {
- { name = "file:bchb8a", features = "virtualmath", main = true },
- { name = "mdbchb7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdbchbi7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdbchbi7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdbchb7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdbchb7v", vector = "tex-ex", extension = true },
- { name = "mdbchbma", vector = "tex-ma" },
- { name = "mdbchbmb", vector = "tex-mb" },
- }
- }
- }
-}
+return {
+ name = "mdbch-math",
+ version = "1.00",
+ comment = "Math fonts that complement ITC Charter.",
+ author = "Hans, Mojca, Aditya",
+ copyright = "ConTeXt development team",
+ mathematics = {
+ mapfiles = {
+ "mdbch.map",
+ "mkiv-base.map",
+ },
+ virtuals = {
+ ["mdbch-rm"] = {
+ { name = "file:bchr8a", features = "virtualmath", main = true },
+ { name = "mdbchr7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdbchri7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdbchri7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdbchb7m", vector = "tex-bf", skewchar=0x7F },
+ { name = "mdbchbi7m", vector = "tex-bi", skewchar=0x7F },
+ { name = "mdbchr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdbchr7v", vector = "tex-ex", extension = true },
+ { name = "mdbchrma", vector = "tex-ma" },
+ { name = "mdbchrmb", vector = "tex-mb" },
+ },
+ ["mdbch-bf"] = {
+ { name = "file:bchb8a", features = "virtualmath", main = true },
+ { name = "mdbchb7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdbchbi7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdbchbi7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdbchb7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdbchb7v", vector = "tex-ex", extension = true },
+ { name = "mdbchbma", vector = "tex-ma" },
+ { name = "mdbchbmb", vector = "tex-mb" },
+ }
+ }
+ }
+}
diff --git a/Master/texmf-dist/tex/context/fonts/mdici-math.lfg b/Master/texmf-dist/tex/context/fonts/mdici-math.lfg
index 8c16b866532..84833c40796 100644
--- a/Master/texmf-dist/tex/context/fonts/mdici-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/mdici-math.lfg
@@ -1,37 +1,37 @@
-return {
- name = "mdici-math",
- version = "1.00",
- comment = "Math fonts that complement Charter ITC Std.",
- author = "Hans, Mojca, Aditya",
- copyright = "ConTeXt development team",
- mathematics = {
- mapfiles = {
- "mdici.map",
- "mkiv-base.map",
- },
- virtuals = {
- ["mdici-rm"] = {
- { name = "file:bchr8a", features = "virtualmath", main = true },
- { name = "mdicir7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdiciri7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdiciri7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdicib7m", vector = "tex-bf", skewchar=0x7F },
- { name = "mdicibi7m", vector = "tex-bi", skewchar=0x7F },
- { name = "mdicir7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdicir7v", vector = "tex-ex", extension = true },
- { name = "mdicirma", vector = "tex-ma" },
- { name = "mdicirmb", vector = "tex-mb" },
- },
- ["mdici-bf"] = {
- { name = "file:bchb8a", features = "virtualmath", main = true },
- { name = "mdicib7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdicibi7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdicibi7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdicib7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdicib7v", vector = "tex-ex", extension = true },
- { name = "mdicibma", vector = "tex-ma" },
- { name = "mdicibmb", vector = "tex-mb" },
- }
- }
- }
-}
+return {
+ name = "mdici-math",
+ version = "1.00",
+ comment = "Math fonts that complement Charter ITC Std.",
+ author = "Hans, Mojca, Aditya",
+ copyright = "ConTeXt development team",
+ mathematics = {
+ mapfiles = {
+ "mdici.map",
+ "mkiv-base.map",
+ },
+ virtuals = {
+ ["mdici-rm"] = {
+ { name = "file:bchr8a", features = "virtualmath", main = true },
+ { name = "mdicir7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdiciri7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdiciri7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdicib7m", vector = "tex-bf", skewchar=0x7F },
+ { name = "mdicibi7m", vector = "tex-bi", skewchar=0x7F },
+ { name = "mdicir7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdicir7v", vector = "tex-ex", extension = true },
+ { name = "mdicirma", vector = "tex-ma" },
+ { name = "mdicirmb", vector = "tex-mb" },
+ },
+ ["mdici-bf"] = {
+ { name = "file:bchb8a", features = "virtualmath", main = true },
+ { name = "mdicib7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdicibi7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdicibi7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdicib7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdicib7v", vector = "tex-ex", extension = true },
+ { name = "mdicibma", vector = "tex-ma" },
+ { name = "mdicibmb", vector = "tex-mb" },
+ }
+ }
+ }
+}
diff --git a/Master/texmf-dist/tex/context/fonts/mdpgd-math.lfg b/Master/texmf-dist/tex/context/fonts/mdpgd-math.lfg
index 583870944e8..b7863b46e4a 100644
--- a/Master/texmf-dist/tex/context/fonts/mdpgd-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/mdpgd-math.lfg
@@ -1,37 +1,37 @@
-return {
- name = "mdpgd-math",
- version = "1.00",
- comment = "Math fonts that complement Adobe Garamond Pro.",
- author = "Hans, Mojca, Aditya",
- copyright = "ConTeXt development team",
- mathematics = {
- mapfiles = {
- "mdpgd.map",
- "mkiv-base.map",
- },
- virtuals = {
- ["mdpgd-rm"] = {
- { name = "file:ugmr8a", features = "virtualmath", main = true },
- { name = "mdpgdr7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdpgdri7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdpgdri7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdpgds7m", vector = "tex-bf", skewchar=0x7F },
- { name = "mdpgdsi7m", vector = "tex-bi", skewchar=0x7F },
- { name = "mdpgdr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdpgdr7v", vector = "tex-ex", extension = true },
- { name = "mdpgdrma", vector = "tex-ma" },
- { name = "mdpgdrmb", vector = "tex-mb" },
- },
- ["mdpgd-bf"] = {
- { name = "file:ugmm8a", features = "virtualmath", main = true },
- { name = "mdpgds7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdpgdsi7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdpgdsi7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdpgds7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdpgds7v", vector = "tex-ex", extension = true },
- { name = "mdpgdsma", vector = "tex-ma" },
- { name = "mdpgdsmb", vector = "tex-mb" },
- }
- }
- }
-}
+return {
+ name = "mdpgd-math",
+ version = "1.00",
+ comment = "Math fonts that complement Adobe Garamond Pro.",
+ author = "Hans, Mojca, Aditya",
+ copyright = "ConTeXt development team",
+ mathematics = {
+ mapfiles = {
+ "mdpgd.map",
+ "mkiv-base.map",
+ },
+ virtuals = {
+ ["mdpgd-rm"] = {
+ { name = "file:ugmr8a", features = "virtualmath", main = true },
+ { name = "mdpgdr7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdpgdri7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdpgdri7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdpgds7m", vector = "tex-bf", skewchar=0x7F },
+ { name = "mdpgdsi7m", vector = "tex-bi", skewchar=0x7F },
+ { name = "mdpgdr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdpgdr7v", vector = "tex-ex", extension = true },
+ { name = "mdpgdrma", vector = "tex-ma" },
+ { name = "mdpgdrmb", vector = "tex-mb" },
+ },
+ ["mdpgd-bf"] = {
+ { name = "file:ugmm8a", features = "virtualmath", main = true },
+ { name = "mdpgds7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdpgdsi7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdpgdsi7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdpgds7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdpgds7v", vector = "tex-ex", extension = true },
+ { name = "mdpgdsma", vector = "tex-ma" },
+ { name = "mdpgdsmb", vector = "tex-mb" },
+ }
+ }
+ }
+}
diff --git a/Master/texmf-dist/tex/context/fonts/mdpus-math.lfg b/Master/texmf-dist/tex/context/fonts/mdpus-math.lfg
index 59b6bc8e921..95d51de1467 100644
--- a/Master/texmf-dist/tex/context/fonts/mdpus-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/mdpus-math.lfg
@@ -1,37 +1,37 @@
-return {
- name = "mdpus-math",
- version = "1.00",
- comment = "Math fonts that complement Adobe Utopia Std.",
- author = "Hans, Mojca, Aditya",
- copyright = "ConTeXt development team",
- mathematics = {
- mapfiles = {
- "mdpus.map",
- "mkiv-base.map",
- },
- virtuals = {
- ["mdpus-rm"] = {
- { name = "file:putr8a", features = "virtualmath", main = true },
- { name = "mdpusr7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdpusri7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdpusri7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdpuss7m", vector = "tex-bf", skewchar=0x7F },
- { name = "mdpussi7m", vector = "tex-bi", skewchar=0x7F },
- { name = "mdpusr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdpusr7v", vector = "tex-ex", extension = true },
- { name = "mdpusrma", vector = "tex-ma" },
- { name = "mdpusrmb", vector = "tex-mb" },
- },
- ["mdpus-bf"] = {
- { name = "file:putb8a", features = "virtualmath", main = true },
- { name = "mdpuss7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdpussi7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdpussi7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdpuss7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdpuss7v", vector = "tex-ex", extension = true },
- { name = "mdpussma", vector = "tex-ma" },
- { name = "mdpussmb", vector = "tex-mb" },
- }
- }
- }
-}
+return {
+ name = "mdpus-math",
+ version = "1.00",
+ comment = "Math fonts that complement Adobe Utopia Std.",
+ author = "Hans, Mojca, Aditya",
+ copyright = "ConTeXt development team",
+ mathematics = {
+ mapfiles = {
+ "mdpus.map",
+ "mkiv-base.map",
+ },
+ virtuals = {
+ ["mdpus-rm"] = {
+ { name = "file:putr8a", features = "virtualmath", main = true },
+ { name = "mdpusr7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdpusri7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdpusri7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdpuss7m", vector = "tex-bf", skewchar=0x7F },
+ { name = "mdpussi7m", vector = "tex-bi", skewchar=0x7F },
+ { name = "mdpusr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdpusr7v", vector = "tex-ex", extension = true },
+ { name = "mdpusrma", vector = "tex-ma" },
+ { name = "mdpusrmb", vector = "tex-mb" },
+ },
+ ["mdpus-bf"] = {
+ { name = "file:putb8a", features = "virtualmath", main = true },
+ { name = "mdpuss7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdpussi7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdpussi7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdpuss7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdpuss7v", vector = "tex-ex", extension = true },
+ { name = "mdpussma", vector = "tex-ma" },
+ { name = "mdpussmb", vector = "tex-mb" },
+ }
+ }
+ }
+}
diff --git a/Master/texmf-dist/tex/context/fonts/mdput-math.lfg b/Master/texmf-dist/tex/context/fonts/mdput-math.lfg
index 885c726b607..7779ec1526b 100644
--- a/Master/texmf-dist/tex/context/fonts/mdput-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/mdput-math.lfg
@@ -1,37 +1,37 @@
-return {
- name = "mdput-math",
- version = "1.00",
- comment = "Math fonts that complement Adobe Utopia.",
- author = "Hans, Mojca, Aditya",
- copyright = "ConTeXt development team",
- mathematics = {
- mapfiles = {
- "mdput.map",
- "mkiv-base.map",
- },
- virtuals = {
- ["mdput-rm"] = {
- { name = "file:putr8a", features = "virtualmath", main = true },
- { name = "mdputr7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdputri7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdputri7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdputb7m", vector = "tex-bf", skewchar=0x7F },
- { name = "mdputbi7m", vector = "tex-bi", skewchar=0x7F },
- { name = "mdputr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdputr7v", vector = "tex-ex", extension = true },
- { name = "mdputrma", vector = "tex-ma" },
- { name = "mdputrmb", vector = "tex-mb" },
- },
- ["mdput-bf"] = {
- { name = "file:putb8a", features = "virtualmath", main = true },
- { name = "mdputb7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdputbi7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdputbi7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdputb7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdputb7v", vector = "tex-ex", extension = true },
- { name = "mdputbma", vector = "tex-ma" },
- { name = "mdputbmb", vector = "tex-mb" },
- }
- }
- }
-}
+return {
+ name = "mdput-math",
+ version = "1.00",
+ comment = "Math fonts that complement Adobe Utopia.",
+ author = "Hans, Mojca, Aditya",
+ copyright = "ConTeXt development team",
+ mathematics = {
+ mapfiles = {
+ "mdput.map",
+ "mkiv-base.map",
+ },
+ virtuals = {
+ ["mdput-rm"] = {
+ { name = "file:putr8a", features = "virtualmath", main = true },
+ { name = "mdputr7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdputri7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdputri7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdputb7m", vector = "tex-bf", skewchar=0x7F },
+ { name = "mdputbi7m", vector = "tex-bi", skewchar=0x7F },
+ { name = "mdputr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdputr7v", vector = "tex-ex", extension = true },
+ { name = "mdputrma", vector = "tex-ma" },
+ { name = "mdputrmb", vector = "tex-mb" },
+ },
+ ["mdput-bf"] = {
+ { name = "file:putb8a", features = "virtualmath", main = true },
+ { name = "mdputb7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdputbi7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdputbi7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdputb7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdputb7v", vector = "tex-ex", extension = true },
+ { name = "mdputbma", vector = "tex-ma" },
+ { name = "mdputbmb", vector = "tex-mb" },
+ }
+ }
+ }
+}
diff --git a/Master/texmf-dist/tex/context/fonts/mdugm-math.lfg b/Master/texmf-dist/tex/context/fonts/mdugm-math.lfg
index 0869c84e689..293c99371b8 100644
--- a/Master/texmf-dist/tex/context/fonts/mdugm-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/mdugm-math.lfg
@@ -1,37 +1,37 @@
-return {
- name = "mdugm-math",
- version = "1.00",
- comment = "Math fonts that complement URW Garamond.",
- author = "Hans, Mojca, Aditya",
- copyright = "ConTeXt development team",
- mathematics = {
- mapfiles = {
- "mdugm.map",
- "mkiv-base.map",
- },
- virtuals = {
- ["mdugm-rm"] = {
- { name = "file:ugmr8a", features = "virtualmath", main = true },
- { name = "mdugmr7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdugmri7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdugmri7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdugmm7m", vector = "tex-bf", skewchar=0x7F },
- { name = "mdugmmi7m", vector = "tex-bi", skewchar=0x7F },
- { name = "mdugmr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdugmr7v", vector = "tex-ex", extension = true },
- { name = "mdugmrma", vector = "tex-ma" },
- { name = "mdugmrmb", vector = "tex-mb" },
- },
- ["mdugm-bf"] = {
- { name = "file:ugmm8a", features = "virtualmath", main = true },
- { name = "mdugmm7m", vector = "tex-mr", skewchar=0x7F },
- { name = "mdugmmi7m", vector = "tex-mi", skewchar=0x7F },
- { name = "mdugmmi7m", vector = "tex-it", skewchar=0x7F },
- { name = "mdugmm7y", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "mdugmm7v", vector = "tex-ex", extension = true },
- { name = "mdugmmma", vector = "tex-ma" },
- { name = "mdugmmmb", vector = "tex-mb" },
- }
- }
- }
-}
+return {
+ name = "mdugm-math",
+ version = "1.00",
+ comment = "Math fonts that complement URW Garamond.",
+ author = "Hans, Mojca, Aditya",
+ copyright = "ConTeXt development team",
+ mathematics = {
+ mapfiles = {
+ "mdugm.map",
+ "mkiv-base.map",
+ },
+ virtuals = {
+ ["mdugm-rm"] = {
+ { name = "file:ugmr8a", features = "virtualmath", main = true },
+ { name = "mdugmr7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdugmri7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdugmri7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdugmm7m", vector = "tex-bf", skewchar=0x7F },
+ { name = "mdugmmi7m", vector = "tex-bi", skewchar=0x7F },
+ { name = "mdugmr7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdugmr7v", vector = "tex-ex", extension = true },
+ { name = "mdugmrma", vector = "tex-ma" },
+ { name = "mdugmrmb", vector = "tex-mb" },
+ },
+ ["mdugm-bf"] = {
+ { name = "file:ugmm8a", features = "virtualmath", main = true },
+ { name = "mdugmm7m", vector = "tex-mr", skewchar=0x7F },
+ { name = "mdugmmi7m", vector = "tex-mi", skewchar=0x7F },
+ { name = "mdugmmi7m", vector = "tex-it", skewchar=0x7F },
+ { name = "mdugmm7y", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "mdugmm7v", vector = "tex-ex", extension = true },
+ { name = "mdugmmma", vector = "tex-ma" },
+ { name = "mdugmmmb", vector = "tex-mb" },
+ }
+ }
+ }
+}
diff --git a/Master/texmf-dist/tex/context/fonts/px-math.lfg b/Master/texmf-dist/tex/context/fonts/px-math.lfg
index 2996a55e538..14f71dad379 100644
--- a/Master/texmf-dist/tex/context/fonts/px-math.lfg
+++ b/Master/texmf-dist/tex/context/fonts/px-math.lfg
@@ -10,7 +10,7 @@ return {
},
virtuals = {
["px-math"] = {
- { name = "texgyrepagella-regular.otf", features = "virtualmath", main = true },
+ { name = "texgyre-pagella-math-regular.otf", features = "virtualmath", main = true },
{ name = "texgyrepagella-regular.otf", features = "virtualmath", vector = "tex-mr-missing" } ,
{ name = "rpxr.tfm", vector = "tex-mr" } ,
{ name = "rpxmi.tfm", vector = "tex-mi", skewchar=0x7F },
diff --git a/Master/texmf-dist/tex/context/fonts/texgyre.lfg b/Master/texmf-dist/tex/context/fonts/texgyre.lfg
index 7782aa5093c..7859820376b 100644
--- a/Master/texmf-dist/tex/context/fonts/texgyre.lfg
+++ b/Master/texmf-dist/tex/context/fonts/texgyre.lfg
@@ -26,5 +26,11 @@ return {
"tgbonummath-regular.otf",
"tgbonum-math.otf",
},
+ ["texgyre-schola-math-regular.otf"] = {
+ "texgyreschola-math.otf", -- beta
+ "texgyrescholamath-regular.otf",
+ "tgscholamath-regular.otf",
+ "tgschola-math.otf",
+ },
},
}
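(The filenames block above lists alternative file names for the beta TeX Gyre Schola math font. A hedged sketch of how such a fallback list can be walked; findfile stands in for whatever file locator is available and is assumed to return nil or an empty string when a file is missing:)

local function resolvefirst(filenames, askedname, findfile)
    local found = findfile(askedname)
    if found and found ~= "" then
        return askedname
    end
    local alternatives = filenames[askedname]
    if alternatives then
        for i = 1, #alternatives do
            local name = alternatives[i]
            found = findfile(name)
            if found and found ~= "" then
                return name
            end
        end
    end
end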
diff --git a/Master/texmf-dist/tex/context/fonts/treatments.lfg b/Master/texmf-dist/tex/context/fonts/treatments.lfg
index 22706d6e974..44d24da22a3 100644
--- a/Master/texmf-dist/tex/context/fonts/treatments.lfg
+++ b/Master/texmf-dist/tex/context/fonts/treatments.lfg
@@ -2,6 +2,47 @@
-- the order specified by tree order access. The first treatment of a file
-- always wins, so one can overload. These files are not (to be) loaded with
-- font definitions. (Experiment as part of writing the font manual.)
+--
+-- So there are several ways to fix a font: add a patcher to a goodie file and
+-- load that one. Such a patch can end up in the cached file. Treatments are
+-- applied at runtime. An experimental auto-loaded goodie approach is not yet
+-- enabled and will never be if treatments can do the job.
+
+local report = fonts.treatments.report
+
+local fix_unifraktur = {
+ comment = "suspicious x height",
+ fixes = function(data)
+ local pfminfo = data.metadata.pfminfo
+ if pfminfo then
+ local os2_xheight = pfminfo.os2_xheight
+ if os2_xheight and os2_xheight < 350 then
+ report("suspicious x-height %a, nilling",os2_xheight)
+ pfminfo.os2_xheight_original = os2_xheight
+ pfminfo.os2_xheight = nil
+ end
+ end
+ end,
+}
+
+local fix_lmmonoregular = {
+ comment = "wrong widths of some glyphs",
+ fixes = function(data)
+ report("fixing some wrong widths")
+ local unicodes = data.resources.unicodes
+ local descriptions = data.descriptions
+ local defaultwidth = descriptions[unicodes["zero"]].width
+ descriptions[unicodes["six"] ].width = defaultwidth
+ descriptions[unicodes["nine"] ].width = defaultwidth
+ descriptions[unicodes["caron"] ].width = defaultwidth
+ descriptions[unicodes["perthousand"] ].width = defaultwidth
+ descriptions[unicodes["numero"] ].width = defaultwidth
+ descriptions[unicodes["caron.cap"] ].width = defaultwidth
+ descriptions[unicodes["six.taboldstyle"] ].width = defaultwidth
+ descriptions[unicodes["nine.taboldstyle"]].width = defaultwidth
+ descriptions[unicodes["dollar.oldstyle" ]].width = defaultwidth
+ end
+}
return {
name = "treatments",
@@ -13,18 +54,30 @@ return {
-- we need to complete this list in order to be able to warn
-- users not to include these files unless permitted
["adobeheitistd-regular.otf"] = {
+ comment = "this font is part of acrobat",
ignored = false,
-- included = false, -- not yet
- comment = "this font is part of acrobat",
},
-- just an experiment .. normally no big deal but I ran into
-- such case
["crap.ttf"] = {
- ignored = true,
comment = "a text file with suffix ttf", -- used in test file
+ ignored = true,
+ },
+ ["lingoes.ttf"] = {
+ comment = "bugged file",
+ ignored = true,
},
- ["latinmodern-math.otf"] = {
- comment = "experimental",
- }
+ -- harmless example
+ -- ["copperplatethirtythreebc.ttf"] = {
+ -- comment = "hangs and has no hyphen",
+ -- ignored = true,
+ -- },
+ -- ["latinmodern-math.otf"] = {
+ -- comment = "experimental",
+ -- },
+ ["lmmono12regular.otf"] = fix_lmmonoregular,
+ ["unifrakturcook.ttf"] = fix_unifraktur,
+ ["unifrakturmaguntia.ttf"] = fix_unifraktur,
},
}
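(Treatments like fix_unifraktur and fix_lmmonoregular above are applied at runtime to the raw font data. A hedged sketch of a private entry in the same shape; the font name, the field test and its sign check are made up for illustration, and report is the treatments reporter defined at the top of this file:)

local fix_myfont = {
    comment = "bogus italic angle",
    fixes = function(data)
        local metadata    = data.metadata
        local italicangle = metadata and metadata.italicangle
        if italicangle and italicangle > 0 then
            report("flipping suspicious italic angle %a", italicangle)
            metadata.italicangle = -italicangle
        end
    end,
}

-- hooked in alongside the entries above as:
--   ["myfont-italic.otf"] = fix_myfont,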
diff --git a/Master/texmf-dist/tex/context/fonts/unifraktur.lfg b/Master/texmf-dist/tex/context/fonts/unifraktur.lfg
new file mode 100644
index 00000000000..32ffed928f5
--- /dev/null
+++ b/Master/texmf-dist/tex/context/fonts/unifraktur.lfg
@@ -0,0 +1,23 @@
+-- moved to treatments.lfg
+--
+-- fonts.handlers.otf.enhancers.patches.register("after","check metadata","unifraktur*", function(data,filename)
+-- data.metadata.pfminfo.os2_xheight = nil
+-- end)
+
+return {
+ name = "unicode fraktur",
+ version = "1.00",
+ comment = "Goodies that complement unicode fraktur.",
+ author = "Hans Hagen",
+ copyright = "ConTeXt development team",
+ letterspacing = {
+ -- watch it: zwnj's are used (in the tounicodes too)
+ keptligatures = {
+ ["c_afii301_k.ccmp"] = true, -- ck
+ ["c_afii301_h.ccmp"] = true, -- ch
+ ["t_afii301_z.ccmp"] = true, -- tz
+ ["uniFB05"] = true, -- ſt
+ },
+ }
+}
+
[Regenerated interface XML files: cont-cs.xml, cont-de.xml, cont-en.xml, cont-fr.xml, cont-it.xml, cont-nl.xml, cont-pe.xml, cont-ro.xml each get one hunk around line 10311 (cont-nl.xml also around lines 6525 and 8996), and keys-cs.xml, keys-de.xml, keys-en.xml, keys-fr.xml, keys-it.xml, keys-nl.xml each get a series of hunks between lines 284 and 1051 adding new entries. The XML element lines of these hunks are not recoverable and are omitted here.]
@@ -717,6 +729,7 @@
+
@@ -753,6 +766,8 @@
+
+
@@ -804,9 +819,12 @@
+
+
+
@@ -824,8 +842,13 @@
+
+
+
+
+
@@ -865,6 +888,7 @@
+
@@ -926,12 +950,14 @@
-
+
+
+
@@ -978,6 +1004,8 @@
+
+
@@ -988,7 +1016,11 @@
+
+
+
+
@@ -1019,6 +1051,8 @@
+
+
@@ -1066,7 +1100,7 @@
-
+
diff --git a/Master/texmf-dist/tex/context/interface/keys-pe.xml b/Master/texmf-dist/tex/context/interface/keys-pe.xml
index 9303c29fd94..8e4d412d50a 100644
--- a/Master/texmf-dist/tex/context/interface/keys-pe.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-pe.xml
@@ -284,6 +284,7 @@
+
@@ -417,12 +418,14 @@
+
+
@@ -599,6 +602,12 @@
+
+
+
+
+
+
@@ -614,6 +623,7 @@
+
@@ -636,6 +646,7 @@
+
@@ -688,6 +699,7 @@
+
@@ -717,6 +729,7 @@
+
@@ -753,6 +766,8 @@
+
+
@@ -804,9 +819,12 @@
+
+
+
@@ -824,8 +842,13 @@
+
+
+
+
+
@@ -865,6 +888,7 @@
+
@@ -932,6 +956,8 @@
+
+
@@ -978,6 +1004,8 @@
+
+
@@ -988,7 +1016,11 @@
+
+
+
+
@@ -1019,6 +1051,8 @@
+
+
diff --git a/Master/texmf-dist/tex/context/interface/keys-ro.xml b/Master/texmf-dist/tex/context/interface/keys-ro.xml
index 29368c9bc64..f9ef01b9fdf 100644
--- a/Master/texmf-dist/tex/context/interface/keys-ro.xml
+++ b/Master/texmf-dist/tex/context/interface/keys-ro.xml
@@ -284,6 +284,7 @@
+
@@ -417,12 +418,14 @@
+
+
@@ -599,6 +602,12 @@
+
+
+
+
+
+
@@ -614,6 +623,7 @@
+
@@ -636,6 +646,7 @@
+
@@ -688,6 +699,7 @@
+
@@ -717,6 +729,7 @@
+
@@ -753,6 +766,8 @@
+
+
@@ -804,9 +819,12 @@
+
+
+
@@ -824,8 +842,13 @@
+
+
+
+
+
@@ -865,6 +888,7 @@
+
@@ -932,6 +956,8 @@
+
+
@@ -978,6 +1004,8 @@
+
+
@@ -988,7 +1016,11 @@
+
+
+
+
@@ -1019,6 +1051,8 @@
+
+
diff --git a/Master/texmf-dist/tex/context/patterns/lang-af.hyp b/Master/texmf-dist/tex/context/patterns/lang-af.hyp
index 869cff9fb6b..b7a8ed85d0b 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-af.hyp
+++ b/Master/texmf-dist/tex/context/patterns/lang-af.hyp
@@ -2,7 +2,191 @@
% for comment and copyright, see lang-af.rme
-% used: a d l n s
+% used: a b c d e f g h i j k l m n o p r s t u v w x y ê ë ô
\hyphenation{
-sandaal}
\ No newline at end of file
+sandaal
+aand-e-tes
+aan-gons
+aan-sit-riem
+af-ets
+af-glooi
+a-fri-kaans-eer-ste-taal-spre-ker
+al-berts-kroon
+al-ler-geen
+baad-jie-mou-e
+baan-vak
+bar-mit-swa
+boek-il-lus-tra-sies
+bruids-ka-mer
+buf-fels-poort
+bur-ger-drag
+chris-sies-meer
+da-gha-men-ger
+dak-oor-hang
+dek-la-ding
+dek-weef-sel
+del-gings-fonds-plan
+deur-swer-we
+di-a-lek-woor-de-boek
+dik-bek-wind
+di-vi-dend-uit-ke-ring
+dom-siek-te
+drie-hoeks-me-ting
+druk-ker-fonts
+ei-en-doms-wê-reld
+eks-trin-sie-ke
+e-lands-kraal
+e-lek-tro-ne-ga-ti-wi-teit
+et-ter-sweer
+fru-ga-le
+gang-lig
+gars-aar
+ge-beds-ket-ting
+ge-bruik-sfeer
+ge-loofs-i-den-ti-teit
+gem-mer-es-sens
+ge-skoert
+ge-slags-om-gang
+ge-voels-kri-tiek
+ge-voels-waar-de
+ge-we-tens-angs
+ge-wrig-smeer
+gras-ta-pyt
+grie-kwa
+groeps-por-tret-te
+grond-sop
+haard-ys-ter
+haat-skrif
+han-dels-per-seel
+han-ger-tjie-hei-de
+har-sing-aan-doe-ning
+hel-toe-gaan-vloek-woor-de
+hond-ag-tig
+ie-der-een
+in-dink
+in-gaar
+in-skink
+in-steek-slot
+in-tap
+in-ter-net-om-ge-wing
+in-tree-da-tum
+in-twy-fel-trek-king
+ja-ka-ran-da-ko-nin-gin
+jong-mei-sie-dro-me
+ka-juits-maat
+ka-li-glas
+kam-de-bo-stink-hout
+ka-me-roens
+kam-sti-ge
+kant-ruit
+ka-ree-paal
+karst-treg-ter
+keg-an-ker
+kies-a-re-as
+klik-spaan
+knal-kwik
+kon-sep-or-don-nan-sie
+kooks-oond
+ko-tan-gens
+kui-pers-am-bag
+kuns-te-ra-de
+kwak-sal-wers-mid-del
+lamp-staan-der
+lands-op-me-ting
+lang-ter-myn-im-pak
+lang-u-re
+la-ven-tel-tak-kies
+le-wens-kiem
+lies-maat
+lig-gaams-tem-pe-ra-tuur
+limf-weef-sel
+lip-om-ly-ner
+maand-uit-ga-we
+man-gaan-staal
+mas-korf
+mens-ky-ker
+me-to-diek-es-sen-sies
+mid-o-se-a-nie-se
+mou-ska-kel
+na-gaan-de
+nar-re-slee
+ne-gev-woes-tyn
+nek-a-re
+om-me-swaai
+on-der-wys-ak-sie
+on-tra-di-si-o-ne-le
+oor-deels-plek
+o-ra-gie
+paarl-oos
+pa-si-ënt-re-kords
+pas-to-rie-tuin
+plan-ke-koors
+poe-toe-pap
+pos-a-dres
+pot-te-bak-kers-oond
+pries-ter-amp
+prins-loo-straat
+proe-gla-sie
+pte-ri-gi-um
+raar-der
+re-gi-ments-kom-man-dant
+regs-en-ti-teit
+res-lap
+rond-skink
+rooi-poot-el-sie
+room-ter-te
+ru-staal
+se-ren-ge-ti
+seun-so-praan
+siek-wees
+sies-tog
+skaats-loop-baan
+skree-tjies
+slet-jie
+snik-warm
+soet-o-lie
+son-de-val
+sor-ghum-oes
+spoed-oe-fe-nin-ge
+spoor-weg-in-kom-ste
+stads-nôi
+sti-let-jie
+stok-siel-sa-lig-al-leen
+straat-slim
+stre-pies-fer-weel
+suid-ein-de
+suip-roes
+swaan-nes-te
+swart-aas-vo-ëls
+swie-ren-ga
+ta-ke-la-sie
+teen-sank-sies
+tip-pe-ra-ry
+tref-en-trap-on-ge-luk
+tui-me-laar-dop
+twis-ap-pel
+tyds-ter-me
+ui-er-sweer
+uit-tree-loon
+val-reep-leer
+ver-band-uit-ma-ker
+ver-koops-wel-sla-e
+ver-sap-te
+ver-val-funk-sie
+ver-wy-sings-half-le-we-in-deks
+vin-ger-e-te
+volks-e-mo-sie
+volks-ka-pi-ta-lis-me
+vol-son-ge
+vond-se
+vos-kop
+vre-des-en-gel
+vre-des-par-ty
+wa-pen-stil-stand
+wa-ter-ver-koe-lings-to-rings
+we-du-wee-skof
+wel-oor-wo-ë
+werp-lood
+wes-oe-wer
+xho-sa-tjie}
\ No newline at end of file
diff --git a/Master/texmf-dist/tex/context/patterns/lang-af.lua b/Master/texmf-dist/tex/context/patterns/lang-af.lua
index 2a3b6a49eb4..31becd0bab4 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-af.lua
+++ b/Master/texmf-dist/tex/context/patterns/lang-af.lua
@@ -1,10 +1,10 @@
return {
["comment"]="% generated by mtxrun --script pattern --convert",
["exceptions"]={
- ["characters"]="adlns",
- ["data"]="sandaal",
- ["length"]=7,
- ["n"]=1,
+ ["characters"]="abcdefghijklmnoprstuvwxyêëô",
+ ["data"]="sandaal aand-e-tes aan-gons aan-sit-riem af-ets af-glooi a-fri-kaans-eer-ste-taal-spre-ker al-berts-kroon al-ler-geen baad-jie-mou-e baan-vak bar-mit-swa boek-il-lus-tra-sies bruids-ka-mer buf-fels-poort bur-ger-drag chris-sies-meer da-gha-men-ger dak-oor-hang dek-la-ding dek-weef-sel del-gings-fonds-plan deur-swer-we di-a-lek-woor-de-boek dik-bek-wind di-vi-dend-uit-ke-ring dom-siek-te drie-hoeks-me-ting druk-ker-fonts ei-en-doms-wê-reld eks-trin-sie-ke e-lands-kraal e-lek-tro-ne-ga-ti-wi-teit et-ter-sweer fru-ga-le gang-lig gars-aar ge-beds-ket-ting ge-bruik-sfeer ge-loofs-i-den-ti-teit gem-mer-es-sens ge-skoert ge-slags-om-gang ge-voels-kri-tiek ge-voels-waar-de ge-we-tens-angs ge-wrig-smeer gras-ta-pyt grie-kwa groeps-por-tret-te grond-sop haard-ys-ter haat-skrif han-dels-per-seel han-ger-tjie-hei-de har-sing-aan-doe-ning hel-toe-gaan-vloek-woor-de hond-ag-tig ie-der-een in-dink in-gaar in-skink in-steek-slot in-tap in-ter-net-om-ge-wing in-tree-da-tum in-twy-fel-trek-king ja-ka-ran-da-ko-nin-gin jong-mei-sie-dro-me ka-juits-maat ka-li-glas kam-de-bo-stink-hout ka-me-roens kam-sti-ge kant-ruit ka-ree-paal karst-treg-ter keg-an-ker kies-a-re-as klik-spaan knal-kwik kon-sep-or-don-nan-sie kooks-oond ko-tan-gens kui-pers-am-bag kuns-te-ra-de kwak-sal-wers-mid-del lamp-staan-der lands-op-me-ting lang-ter-myn-im-pak lang-u-re la-ven-tel-tak-kies le-wens-kiem lies-maat lig-gaams-tem-pe-ra-tuur limf-weef-sel lip-om-ly-ner maand-uit-ga-we man-gaan-staal mas-korf mens-ky-ker me-to-diek-es-sen-sies mid-o-se-a-nie-se mou-ska-kel na-gaan-de nar-re-slee ne-gev-woes-tyn nek-a-re om-me-swaai on-der-wys-ak-sie on-tra-di-si-o-ne-le oor-deels-plek o-ra-gie paarl-oos pa-si-ënt-re-kords pas-to-rie-tuin plan-ke-koors poe-toe-pap pos-a-dres pot-te-bak-kers-oond pries-ter-amp prins-loo-straat proe-gla-sie pte-ri-gi-um raar-der re-gi-ments-kom-man-dant regs-en-ti-teit res-lap rond-skink rooi-poot-el-sie room-ter-te ru-staal se-ren-ge-ti seun-so-praan siek-wees sies-tog skaats-loop-baan skree-tjies slet-jie snik-warm soet-o-lie son-de-val sor-ghum-oes spoed-oe-fe-nin-ge spoor-weg-in-kom-ste stads-nôi sti-let-jie stok-siel-sa-lig-al-leen straat-slim stre-pies-fer-weel suid-ein-de suip-roes swaan-nes-te swart-aas-vo-ëls swie-ren-ga ta-ke-la-sie teen-sank-sies tip-pe-ra-ry tref-en-trap-on-ge-luk tui-me-laar-dop twis-ap-pel tyds-ter-me ui-er-sweer uit-tree-loon val-reep-leer ver-band-uit-ma-ker ver-koops-wel-sla-e ver-sap-te ver-val-funk-sie ver-wy-sings-half-le-we-in-deks vin-ger-e-te volks-e-mo-sie volks-ka-pi-ta-lis-me vol-son-ge vond-se vos-kop vre-des-en-gel vre-des-par-ty wa-pen-stil-stand wa-ter-ver-koe-lings-to-rings we-du-wee-skof wel-oor-wo-ë werp-lood wes-oe-wer xho-sa-tjie",
+ ["length"]=2713,
+ ["n"]=185,
},
["metadata"]={
["mnemonic"]="af",
@@ -23,10 +23,15 @@ return {
% and version 1.3 or later is part of all distributions of LaTeX\
% version 2005/12/01 or later.\
% -----------------------------------------------------------------\
-% VERSION: 0.9 (October 2010)\
+% These patterns were generated with Opatgen from a lexicon\
+% of 183 000 syllabified Afrikaans words.\
+% -----------------------------------------------------------------\
+% VERSION: 1.0 (September 2013)\
+%\
+% AUTHORS: Tilla Fick (fickm@unisa.ac.za)\
+% and\
+% Chris Swanepoel (swanecj@unisa.ac.za)\
%\
-% AUTHORS: Tilla Fick and Chris Swanepoel\
-% hyphen at rekenaar dot net\
% -----------------------------------------------------------------\
%\
% Define lefthyphenmin to one and righthyphenmin to two\
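The exceptions table in lang-af.lua packs the same word list as lang-af.hyp into one space-separated data string, together with the distinct letters used, the length of that string and the word count. Below is a minimal sketch of how those fields could be derived from a word list; it is an assumption about what mtxrun --script pattern --convert produces, not its actual code, and it uses the utf8 library, so it needs Lua 5.3 or later.

-- minimal sketch, assuming the converter derives the exception fields
-- roughly like this; the real mtxrun pattern script may differ in detail
local function make_exceptions(words)
    local data = table.concat(words, " ")
    local seen, characters = {}, {}
    -- collect the distinct letters, ignoring hyphens and spaces
    for _, c in utf8.codes(data) do
        local ch = utf8.char(c)
        if ch ~= "-" and ch ~= " " and not seen[ch] then
            seen[ch] = true
            characters[#characters + 1] = ch
        end
    end
    table.sort(characters)
    return {
        characters = table.concat(characters),
        data       = data,
        length     = utf8.len(data), -- assumption: a character count, not bytes
        n          = #words,
    }
end

-- the single old exception reproduces the removed lines above
local t = make_exceptions { "sandaal" }
print(t.characters, t.length, t.n) -- adlns   7   1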
diff --git a/Master/texmf-dist/tex/context/patterns/lang-af.rme b/Master/texmf-dist/tex/context/patterns/lang-af.rme
index da31b9d23cd..33d7d34bbb1 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-af.rme
+++ b/Master/texmf-dist/tex/context/patterns/lang-af.rme
@@ -14,10 +14,15 @@ Afrikaans hyphenation patterns
% and version 1.3 or later is part of all distributions of LaTeX
% version 2005/12/01 or later.
% -----------------------------------------------------------------
-% VERSION: 0.9 (October 2010)
+% These patterns were generated with Opatgen from a lexicon
+% of 183 000 syllabified Afrikaans words.
+% -----------------------------------------------------------------
+% VERSION: 1.0 (September 2013)
+%
+% AUTHORS: Tilla Fick (fickm@unisa.ac.za)
+% and
+% Chris Swanepoel (swanecj@unisa.ac.za)
%
-% AUTHORS: Tilla Fick and Chris Swanepoel
-% hyphen at rekenaar dot net
% -----------------------------------------------------------------
%
% Define lefthyphenmin to one and righthyphenmin to two
diff --git a/Master/texmf-dist/tex/context/patterns/lang-it.lua b/Master/texmf-dist/tex/context/patterns/lang-it.lua
index 20ab48fbfd4..fb6a9d893a4 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-it.lua
+++ b/Master/texmf-dist/tex/context/patterns/lang-it.lua
@@ -38,7 +38,7 @@ return {
%\
% This work consists of the single file hyph-it.tex.\
%\
-% \\versionnumber{4.8i} \\versiondate{2011/08/16}\
+% \\versionnumber{4.9} \\versiondate{2014/04/22}\
%\
% These hyphenation patterns for the Italian language are supposed to comply\
% with the Recommendation UNI 6461 on hyphenation issued by the Italian\
@@ -47,6 +47,7 @@ return {
% liability is disclaimed.\
%\
% ChangeLog:\
+% - 2014-04-22 - Add a few patterns involving `h'\
+% - 2014-04-22 - Add a few patterns involving `h'\
% - 2011-08-16 - Change the licence from GNU LGPL into LPPL v1.3.\
% - 2010-05-24 - Fix for Italian patterns for proper hyphenation of -ich and Ljubljana.\
% - 2008-06-09 - Import of original ithyph.tex into hyph-utf8 package.\
@@ -56,11 +57,11 @@ return {
},
["patterns"]={
["characters"]="'abcdefghijklmnopqrstuvwxyz’",
- ["data"]=".a3p2n .anti1 .anti3m2n .bio1 .ca4p3s .circu2m1 .contro1 .di2s3cine .e2x1eu .fran2k3 .free3 .li3p2sa .narco1 .opto1 .orto3p2 .para1 .poli3p2 .pre1 .p2s .re1i2scr .sha2re3 .tran2s3c .tran2s3d .tran2s3l .tran2s3n .tran2s3p .tran2s3r .tran2s3t .su2b3lu .su2b3r .wa2g3n .wel2t1 2'2 2’2 a1ia a1ie a1io a1iu a1uo a1ya 2at. e1iu e2w o1ia o1ie o1io o1iu 1b 2bb 2bc 2bd 2bf 2bm 2bn 2bp 2bs 2bt 2bv b2l b2r 2b. 2b' 2b’ 1c 2cb 2cc 2cd 2cf 2ck 2cm 2cn 2cq 2cs 2ct 2cz 2chh c2h 2ch. 2ch'. 2ch’. 2ch''. 2ch’’. 2chb ch2r 2chn c2l c2r 2c. 2c' 2c’ .c2 1d 2db 2dd 2dg 2dl 2dm 2dn 2dp d2r 2ds 2dt 2dv 2dw 2d. 2d' 2d’ .d2 1f 2fb 2fg 2ff 2fn f2l f2r 2fs 2ft 2f. 2f' 2f’ 1g 2gb 2gd 2gf 2gg g2h g2l 2gm g2n 2gp g2r 2gs 2gt 2gv 2gw 2gz 2gh2t 2g. 2g' 2g’ 1h 2hb 2hd 2hh hi3p2n h2l 2hm 2hn 2hr 2hv 2h. 2h' 2h’ 1j 2j. 2j' 2j’ 1k 2kg 2kf k2h 2kk k2l 2km k2r 2ks 2kt 2k. 2k' 2k’ 1l 2lb 2lc 2ld 2l3f2 2lg l2h l2j 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lz 2l. 2l'. 2l’. 2l'' 2l’’ 1m 2mb 2mc 2mf 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2m. 2m' 2m’ 1n 2nb 2nc 2nd 2nf 2ng 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3fer 2nt 2nv 2nz n2g3n 2nheit 2n. 2n' 2n’ 1p 2pd p2h p2l 2pn 3p2ne 2pp p2r 2ps 3p2sic 2pt 2pz 2p. 2p' 2p’ 1q 2qq 2q. 2q' 2q’ 1r 2rb 2rc 2rd 2rf r2h 2rg 2rk 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt r2t2s3 2rv 2rx 2rw 2rz 2r. 2r' 2r’ 1s2 2shm 2sh. 2sh' 2sh’ 2s3s s4s3m 2s3p2n 2stb 2stc 2std 2stf 2stg 2stm 2stn 2stp 2sts 2stt 2stv 2sz 4s. 4s'. 4s’. 4s'' 4s’’ 1t 2tb 2tc 2td 2tf 2tg t2h t2l 2tm 2tn 2tp t2r t2s 3t2sch 2tt t2t3s 2tv 2tw t2z 2tzk tz2s 2t. 2t'. 2t’. 2t'' 2t’’ 1v 2vc v2l v2r 2vv 2v. 2v'. 2v’. 2v'' 2v’’ 1w w2h wa2r 2w1y 2w. 2w' 2w’ 1x 2xb 2xc 2xf 2xh 2xm 2xp 2xt 2xw 2x. 2x' 2x’ y1ou y1i 1z 2zb 2zd 2zl 2zn 2zp 2zt 2zs 2zv 2zz 2z. 2z'. 2z’. 2z'' 2z’’ .z2",
- ["length"]=1806,
+ ["data"]=".a3p2n .anti1 .anti3m2n .bio1 .ca4p3s .circu2m1 .contro1 .di2s3cine .e2x1eu .fran2k3 .free3 .li3p2sa .narco1 .opto1 .orto3p2 .para1 .ph2l .ph2r .poli3p2 .pre1 .p2s .re1i2scr .sha2re3 .tran2s3c .tran2s3d .tran2s3l .tran2s3n .tran2s3p .tran2s3r .tran2s3t .su2b3lu .su2b3r .wa2g3n .wel2t1 2'2 2’2 a1ia a1ie a1io a1iu a1uo a1ya 2at. e1iu e2w o1ia o1ie o1io o1iu 1b 2bb 2bc 2bd 2bf 2bm 2bn 2bp 2bs 2bt 2bv b2l b2r 2b. 2b' 2b’ 1c 2cb 2cc 2cd 2cf 2ck 2cm 2cn 2cq 2cs 2ct 2cz 2chh c2h 2ch. 2ch'. 2ch’. 2ch''. 2ch’’. 2chb ch2r 2chn c2l c2r 2c. 2c' 2c’ .c2 1d 2db 2dd 2dg 2dl 2dm 2dn 2dp d2r 2ds 2dt 2dv 2dw 2d. 2d' 2d’ .d2 1f 2fb 2fg 2ff 2fn f2l f2r 2fs 2ft 2f. 2f' 2f’ 1g 2gb 2gd 2gf 2gg g2h g2l 2gm g2n 2gp g2r 2gs 2gt 2gv 2gw 2gz 2gh2t 2g. 2g' 2g’ .h2 1h 2hb 2hd 2hh hi3p2n h2l 2hm 2hn 2hr 2hv 2h. 2h' 2h’ .j2 1j 2j. 2j' 2j’ .k2 1k 2kg 2kf k2h 2kk k2l 2km k2r 2ks 2kt 2k. 2k' 2k’ 1l 2lb 2lc 2ld 2l3f2 2lg l2h l2j 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lz 2l. 2l'. 2l’. 2l'' 2l’’ 1m 2mb 2mc 2mf 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2m. 2m' 2m’ 1n 2nb 2nc 2nd 2nf 2ng 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3fer 2nt 2nv 2nz n2g3n 2nheit 2n. 2n' 2n’ 1p 2pd p2h p2l 2pn 3p2ne 2pp p2r 2ps 3p2sic 2pt 2pz 2p. 2p' 2p’ 1q 2qq 2q. 2q' 2q’ 1r 2rb 2rc 2rd 2rf r2h 2rg 2rk 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt r2t2s3 2rv 2rx 2rw 2rz 2r. 2r' 2r’ 1s2 2shm 2sh. 2sh' 2sh’ 2s3s s4s3m 2s3p2n 2stb 2stc 2std 2stf 2stg 2stm 2stn 2stp 2sts 2stt 2stv 2sz 4s. 4s'. 4s’. 4s'' 4s’’ .t2 1t 2tb 2tc 2td 2tf 2tg t2h 2th. t2l 2tm 2tn 2tp t2r t2s 3t2sch 2tt t2t3s 2tv 2tw t2z 2tzk tz2s 2t. 2t'. 2t’. 2t'' 2t’’ 1v 2vc v2l v2r 2vv 2v. 2v'. 2v’. 2v'' 2v’’ 1w w2h wa2r 2w1y 2w. 2w' 2w’ 1x 2xb 2xc 2xf 2xh 2xm 2xp 2xt 2xw 2x. 2x' 2x’ y1ou y1i 1z 2zb 2zd 2zl 2zn 2zp 2zt 2zs 2zv 2zz 2z. 2z'. 2z’. 2z'' 2z’’ .z2",
+ ["length"]=1839,
["minhyphenmax"]=1,
["minhyphenmin"]=1,
- ["n"]=377,
+ ["n"]=384,
},
["version"]="1.001",
}
\ No newline at end of file
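The new Italian entries such as .ph2l, .t2 and 2th. are ordinary Liang-style patterns: digits between letters are priorities, an odd value allows a break, an even value forbids one, and the dot anchors the pattern at a word boundary, so 2th. suppresses a break before a word-final "th". The sketch below applies a small pattern set to a word in that fashion; it is an ASCII-only illustration of the mechanism, not the hyphenation code used by the engine.

-- minimal sketch of Liang-style pattern application (ASCII only)
local function parse_pattern(p)
    -- split a pattern like "2th." into its letters ("th.") and the
    -- priority in front of each letter plus one trailing slot
    local letters, values = {}, { 0 }
    for ch in p:gmatch(".") do
        if ch:match("%d") then
            values[#values] = tonumber(ch)
        else
            letters[#letters + 1] = ch
            values[#values + 1] = 0
        end
    end
    return table.concat(letters), values
end

local function hyphenate(word, patterns, leftmin, rightmin)
    local parsed = {}
    for _, p in ipairs(patterns) do
        local key, values = parse_pattern(p)
        parsed[#parsed + 1] = { key = key, values = values }
    end
    local w = "." .. word .. "."          -- boundary markers, as in the .pat files
    local score = {}
    for i = 0, #w do score[i] = 0 end     -- one slot per inter-character position
    for i = 1, #w do                      -- slide every pattern over the word
        for _, pat in ipairs(parsed) do
            if w:sub(i, i + #pat.key - 1) == pat.key then
                for j = 0, #pat.key do
                    local pos = i - 1 + j
                    if pat.values[j + 1] > score[pos] then
                        score[pos] = pat.values[j + 1]
                    end
                end
            end
        end
    end
    local out = {}
    for i = 1, #word do
        out[#out + 1] = word:sub(i, i)
        -- an odd value after letter i allows a hyphen, within the min margins
        if i >= leftmin and i <= #word - rightmin and score[i + 1] % 2 == 1 then
            out[#out + 1] = "-"
        end
    end
    return table.concat(out)
end

-- without the new "2th." pattern a final "th" can be split off a toy word:
print(hyphenate("macbeth", { "1t", "t2h" },         2, 2))  -- macbe-th
print(hyphenate("macbeth", { "1t", "t2h", "2th." }, 2, 2))  -- macbeth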
diff --git a/Master/texmf-dist/tex/context/patterns/lang-it.pat b/Master/texmf-dist/tex/context/patterns/lang-it.pat
index 78a127aa7e4..12a9edf33b4 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-it.pat
+++ b/Master/texmf-dist/tex/context/patterns/lang-it.pat
@@ -21,6 +21,8 @@
.opto1
.orto3p2
.para1
+.ph2l
+.ph2r
.poli3p2
.pre1
.p2s
@@ -137,6 +139,7 @@ g2r
2gh2t
2g.
2g'
+.h2
1h
2hb
2hd
@@ -149,9 +152,11 @@ h2l
2hv
2h.
2h'
+.j2
1j
2j.
2j'
+.k2
1k
2kg
2kf
@@ -288,6 +293,7 @@ s4s3m
4s.
4s'.
4s''
+.t2
1t
2tb
2tc
@@ -295,6 +301,7 @@ s4s3m
2tf
2tg
t2h
+2th.
t2l
2tm
2tn
diff --git a/Master/texmf-dist/tex/context/patterns/lang-it.rme b/Master/texmf-dist/tex/context/patterns/lang-it.rme
index 6cfe6896a06..2a2fb60d567 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-it.rme
+++ b/Master/texmf-dist/tex/context/patterns/lang-it.rme
@@ -32,7 +32,7 @@ Italian hyphenation patterns
%
% This work consists of the single file hyph-it.tex.
%
-% \versionnumber{4.8i} \versiondate{2011/08/16}
+% \versionnumber{4.9} \versiondate{2014/04/22}
%
% These hyphenation patterns for the Italian language are supposed to comply
% with the Recommendation UNI 6461 on hyphenation issued by the Italian
@@ -41,6 +41,7 @@ Italian hyphenation patterns
% liability is disclaimed.
%
% ChangeLog:
+% - 2014-04-22 - Add a few patterns involving `h'
% - 2011-08-16 - Change the licence from GNU LGPL into LPPL v1.3.
% - 2010-05-24 - Fix for Italian patterns for proper hyphenation of -ich and Ljubljana.
% - 2008-06-09 - Import of original ithyph.tex into hyph-utf8 package.
diff --git a/Master/texmf-dist/tex/context/patterns/lang-ml.hyp b/Master/texmf-dist/tex/context/patterns/lang-ml.hyp
new file mode 100644
index 00000000000..7dba49173d1
--- /dev/null
+++ b/Master/texmf-dist/tex/context/patterns/lang-ml.hyp
@@ -0,0 +1,8 @@
+% generated by mtxrun --script pattern --convert
+
+% for comment and copyright, see lang-ml.rme
+
+% used:
+
+\hyphenation{
+}
\ No newline at end of file
diff --git a/Master/texmf-dist/tex/context/patterns/lang-ml.lua b/Master/texmf-dist/tex/context/patterns/lang-ml.lua
new file mode 100644
index 00000000000..042cdf5df72
--- /dev/null
+++ b/Master/texmf-dist/tex/context/patterns/lang-ml.lua
@@ -0,0 +1,45 @@
+return {
+ ["comment"]="% generated by mtxrun --script pattern --convert",
+ ["exceptions"]={
+ ["n"]=0,
+ },
+ ["metadata"]={
+ ["mnemonic"]="ml",
+ ["source"]="hyph-ml",
+ ["texcomment"]="% Malayalam hyphenation patterns\
+% \
+% (more info about the licence to be added later)\
+% \
+% These patterns originate from\
+% http://git.savannah.gnu.org/cgit/smc/hyphenation.git/tree/)\
+% and have been adapted for hyph-utf8 (for use in TeX).\
+%\
+% Hyphenation for Malayalam\
+% Copyright (C) 2008-2010 Santhosh Thottingal \
+%\
+% This library is free software; you can redistribute it and/or\
+% modify it under the terms of the GNU Lesser General Public\
+% License as published by the Free Software Foundation;\
+% version 3 or later version of the License.\
+%\
+% This library is distributed in the hope that it will be useful,\
+% but WITHOUT ANY WARRANTY; without even the implied warranty of\
+% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\
+% Lesser General Public License for more details.\
+%\
+% You should have received a copy of the GNU Lesser General Public\
+% License along with this library; if not, write to the Free Software\
+% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\
+%\
+% ",
+ },
+ ["patterns"]={
+ ["characters"]="ംഃഅആഇഈഉഊഋഌഎഏഐഒഓഔകഖഗഘങചഛജഝഞടഠഡഢണതഥദധനപഫബഭമയരറലളഴവശഷസഹാിീുൂൃെേൈൊോൌ്ൗൠൡൺൻർൽൾൿ",
+ ["data"]="22 11 1അ1 1ആ1 1ഇ1 1ഈ1 1ഉ1 1ഊ1 1ഋ1 1ൠ1 1ഌ1 1ൡ1 1എ1 1ഏ1 1ഐ1 1ഒ1 1ഓ1 1ഔ1 ാ1 ി1 ീ1 ു1 ൂ1 ൃ1 െ1 േ1 ൈ1 ൊ1 ോ1 ൌ1 ൗ1 1ക 1ഖ 1ഗ 1ഘ 1ങ 1ച 1ഛ 1ജ 1ഝ 1ഞ 1ട 1ഠ 1ഡ 1ഢ 1ണ 1ത 1ഥ 1ദ 1ധ 1ന 1പ 1ഫ 1ബ 1ഭ 1മ 1യ 1ര 1റ 1ല 1ള 1ഴ 1വ 1ശ 1ഷ 1സ 1ഹ 2ഃ1 2ം1 2്2 ന്2 ര്2 ള്2 ല്2 ക്2 ണ്2 2ന് 2ല് 2ള് 2ണ് 2ര് 2ക് 2ൺ 2ൻ 2ർ 2ൽ 2ൾ 2ൿ",
+ ["length"]=514,
+ ["minhyphenmax"]=1,
+ ["minhyphenmin"]=1,
+ ["n"]=88,
+ },
+ ["version"]="1.001",
+}
\ No newline at end of file
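The new lang-ml.pat file that follows carries exactly the patterns.data field of lang-ml.lua, written one pattern per line inside \patterns{...}. A minimal sketch of that conversion is given here; the output header is simplified and write_pat is an illustrative helper, not part of the mtxrun converter.

-- minimal sketch, assuming the .pat file is just the space-separated
-- patterns.data field written out one entry per line inside \patterns{...}
local function write_pat(t, filename)
    local f = assert(io.open(filename, "w"))
    f:write("% generated example, see lang-ml.rme for comment and copyright\n\n")
    f:write("\\patterns{\n")
    local list = {}
    for p in t.patterns.data:gmatch("%S+") do
        list[#list + 1] = p
    end
    f:write(table.concat(list, "\n"))
    f:write("}")
    f:close()
end

write_pat(dofile("lang-ml.lua"), "lang-ml-example.pat")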
diff --git a/Master/texmf-dist/tex/context/patterns/lang-ml.pat b/Master/texmf-dist/tex/context/patterns/lang-ml.pat
new file mode 100644
index 00000000000..b53251a69fa
--- /dev/null
+++ b/Master/texmf-dist/tex/context/patterns/lang-ml.pat
@@ -0,0 +1,95 @@
+% generated by mtxrun --script pattern --convert
+
+% for comment and copyright, see lang-ml.rme
+
+% used: ം ഃ അ ആ ഇ ഈ ഉ ഊ ഋ ഌ എ ഏ ഐ ഒ ഓ ഔ ക ഖ ഗ ഘ ങ ച ഛ ജ ഝ ഞ ട ഠ ഡ ഢ ണ ത ഥ ദ ധ ന പ ഫ ബ ഭ മ യ ര റ ല ള ഴ വ ശ ഷ സ ഹ ാ ി ീ ു ൂ ൃ െ േ ൈ ൊ ോ ൌ ് ൗ ൠ ൡ ൺ ൻ ർ ൽ ൾ ൿ
+
+\patterns{
+22
+11
+1അ1
+1ആ1
+1ഇ1
+1ഈ1
+1ഉ1
+1ഊ1
+1ഋ1
+1ൠ1
+1ഌ1
+1ൡ1
+1എ1
+1ഏ1
+1ഐ1
+1ഒ1
+1ഓ1
+1ഔ1
+ാ1
+ി1
+ീ1
+ു1
+ൂ1
+ൃ1
+െ1
+േ1
+ൈ1
+ൊ1
+ോ1
+ൌ1
+ൗ1
+1ക
+1ഖ
+1ഗ
+1ഘ
+1ങ
+1ച
+1ഛ
+1ജ
+1ഝ
+1ഞ
+1ട
+1ഠ
+1ഡ
+1ഢ
+1ണ
+1ത
+1ഥ
+1ദ
+1ധ
+1ന
+1പ
+1ഫ
+1ബ
+1ഭ
+1മ
+1യ
+1ര
+1റ
+1ല
+1ള
+1ഴ
+1വ
+1ശ
+1ഷ
+1സ
+1ഹ
+2ഃ1
+2ം1
+2്2
+ന്2
+ര്2
+ള്2
+ല്2
+ക്2
+ണ്2
+2ന്
+2ല്
+2ള്
+2ണ്
+2ര്
+2ക്
+2ൺ
+2ൻ
+2ർ
+2ൽ
+2ൾ
+2ൿ}
\ No newline at end of file
diff --git a/Master/texmf-dist/tex/context/patterns/lang-ml.rme b/Master/texmf-dist/tex/context/patterns/lang-ml.rme
new file mode 100644
index 00000000000..4fa574fe059
--- /dev/null
+++ b/Master/texmf-dist/tex/context/patterns/lang-ml.rme
@@ -0,0 +1,27 @@
+% generated by mtxrun --script pattern --convert
+
+Malayalam hyphenation patterns
+
+(more info about the licence to be added later)
+
+% These patterns originate from
+% http://git.savannah.gnu.org/cgit/smc/hyphenation.git/tree/)
+% and have been adapted for hyph-utf8 (for use in TeX).
+%
+% Hyphenation for Malayalam
+% Copyright (C) 2008-2010 Santhosh Thottingal
+%
+% This library is free software; you can redistribute it and/or
+% modify it under the terms of the GNU Lesser General Public
+% License as published by the Free Software Foundation;
+% version 3 or later version of the License.
+%
+% This library is distributed in the hope that it will be useful,
+% but WITHOUT ANY WARRANTY; without even the implied warranty of
+% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+% Lesser General Public License for more details.
+%
+% You should have received a copy of the GNU Lesser General Public
+% License along with this library; if not, write to the Free Software
+% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+%
diff --git a/Master/texmf-dist/tex/context/patterns/lang-th.lua b/Master/texmf-dist/tex/context/patterns/lang-th.lua
index 848089b8662..eded2bf09c8 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-th.lua
+++ b/Master/texmf-dist/tex/context/patterns/lang-th.lua
@@ -33,11 +33,11 @@ return {
},
["patterns"]={
["characters"]="กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮะัาำิีึืฺุูเแโใไๅ็่้๊๋์ํ๎",
- ["data"]=".ชี5วั .ทัศนู5 .ที่3 .บท1 .รง4 .ราย3 .ลำ3 .สน5ท .สู3ต .ใบ3 2ก1ก ก4กม กก4ส 2ก1ข ก4ขค กข5คณ ก4ขช กข5ชา ก4ขณ ก5ขณะ ก5ขณา ก4ขบ กข5บุ ก4ขภ กข5ภั ก4ขม ก5ขมั กข5มา กข5มู กข5ลา ก4ขเ กข5เท กข5เว ก4ข์ ก1ค กง5บว ก1จ ก1ช 2กซ ก3ซิ กญ5จน กฎ5หม กฎ5เก กฏ5หม ก5ดิน ก1ต ก4ตด กต5ดิ ก4ตส ก4ตเ ก1ท ก1น ก4นด ก4นธ ก1บ ก1ป กป4ร ก1พ ก1ฟ ก1ม ก4มม กม5ลา ก4มส ก4มเ กย5มุ ก3ย้ กร5กฎ ก5ร5ณั กร5ต๋ 1ก4รร กร5รา กร5ลา ก5ราค ก4รู กร5ไฟ กล5นค กล5บิ กล5มห ก2ว ก5วัต ก5ษณน ก3ษณะ ก5ษณา ก5ษมา ก5ษมี กษ5เท ก1ส กส4น ก4สโ ก1ห 3กอน กอ5อิ กะ5ถั กะ5ผล 4กะร 1กั 1กา กา5กะ กา5ดู กา5นี กา5น้ กา5บอ กา5ฝา กา5ร่ กำ5ด้ กำ5ทอ กำ5ผล กิ5กะ 1กิจ กิ4ต กิ5นี 3กิริ กี5รณ กี5รต กี5สถ 1กุ กุ5งอ กุ5ชิ กุ5ฎุ กุ5มุ กุ5รร กุ5ลี กุ5แห 1กู กู5ปร กู5รข กู5รม กู5ลิ ก1เ ก1แ ก1โ ก1ไ ก่5กอ ก่5บ้ ก่5ป่ ก์5ท็ ข2 ขม5หิ 4ขลา ขอ5ขม ขอ5ง้ ขอ5อภ 1ขั 1ขา ขา5ก๊ ขา5ทน ขิ5ปส ขิ5ไณ ขี้1 ข่5มุ ข่5หง ข้าว3 ค1ค คช5สี คช5เช คช5เม ค4ณิ ค4ทร คท5รี คท5วอ คน5ยอ 4คนิ คป5ซู คป5ผก 3คมน คม5ฟร คม5ลอ 2คย คร5ซอ คร5นอ คร5นี คร5พน คร5ฟิ คร5มเ คร5ร้ คร5ลิ คร5หา 4ค5รัก คฤ5หบ คฤ5หา คฤ5โฆ คล5คู ค2ว คว5ทอ 3ควา 2คส คส5ติ คห5กร คห5นิ คห5บด คห5สถ 3คอน 3คัน 1คา คา5ปู คา5พจ คา5พย คา5รว คา5วจ คำ5ดี คำ5โอ คำ5ไก คี5รี 1คุ คุ5ณู คุ5ลี 4คุ์ คู5ปอ คู5ลอ 2ค1เ ค1โ 2ค์ ค์5จำ 1ฆา ฆา5ณั ฆี5ยก ง1ก ง4กห งกะ4ร ง4กเ ง4ก์ ง1ข ง4ขก ง4ขต ง1ค ง4คจ ง4คช ง4คญ ง4คธ ง4คบ ง4คป งค5วั ง4คศ ง4คโ งฆ5ปร งฆ5สภ งฆ5เถ งฆ5เภ ง1ง ง4งเ ง1จ ง1ฉ ง1ช ง4ชี ง1ซ ง1ด ง1ต ง1ท ง1น งบ5ดุ ง1ป ง1ผ ง1พ ง1ม ง1ย ง1ร ง1ล ง1ว ง4วเ ง1ส งส5กล งส5กุ ง4สบ ง4สพ งส5พย ง4สภ ง1ห งห5นา ง4หบ งห5บั งห5รา 1งา งา5ช้ งา5รำ งู5สว ง1เ ง1แ ง1โ ง1ไ ง่5งอ จ1จ จ4จว จ1ฉ จด5จ่ จต5จำ จต5มู จป4ก จมบ5พ 3จริ จอ5งอ 1จั 1จา จา5มร จา5มี จา5รึ จำ5ทว จำ5อว 1จิ จิ5จู จิ5ตอ จี5ดี จุ5ฑา จุ5สม จ1เ ฉ2 ฉก5ฉว ฉก4ษ ฉท5ทิ ฉร5ฉิ 1ฉั 1ฉา ฉา5ก๊ ฉา5พย ช1ช ช1ฌ ช4ฌก ช4ฌฆ ช5นีก 4ชน์ ชฟ5รอ ชฟ5โร ชร5กล ชร5ริ ชร5ฤก ชร5หล ชร5หึ ชว4โ ชอง4 1ชั 1ชา ชา2ต ชา5ตร ชา5ปี ชา5มต ชา5ยต ชา5สง ชำ5งั 3ชิต ชิ5นี ชิ5รณ ชิ5แก ชี5ผะ ชี5ผ้ ชี5ฟอ ชี5รณ 3ชีว ชี5วน ชุ5ติ ชุ5ลด ชู5ปก ชู5ปถ ชู5ปโ ช่5อิ ช้5สอ ช้5ได ซน5ทร ซฟ5ตี ซ5ราม ซล5มอ 1ซา ซา5ชู ซา5มู ซิ5ตร ซิ5ตี ซิ5ฟิ ซิ5แล ซี5ดี ซี5นี ซี5รา ซี5ริ ซี5รี ซี5ร็ ซี5ลี ซู5ซู ซู5บิ ซู5ริ ซู5ลิ ซ1เ ซ1โ ซ่5ง่ ซ่5ซ้ 1ซ่า ญจ5ดุ ญ4จน ญ5จ5นท ญ5จ5นบ ญ5จนา ญจ5บร ญ5จ5มบ ญจ5รง ญจ5วี ญจ5ศี ญ4ฉน ญ1ช ญ1ญ ญประ4 1ญา ญา4ต ญ่5บ้ ฏ1ฐ ฏ4ฐบ ฏิ5ทิ ฏิ5ปท ฏิ5ปุ ฏิ5สน ฏิ5สว ฐ4ภั ฐม5ฌา ฐม5พย ฐม5ฤก 1ฐา ฐา5นี ฐุ5ชุ ฑา5มณ ฑา5สถ 3ฑูร ฒิ5สภ ฒิ5สม ณ1ฑ ณ4ฑก ณ4ฑฆ ณ4ฑน ณ5ฑนะ ณ4ฑบ ณ4ฑม ณฑ5ลา ณ4ฑส ณ5ฑสก ณฑ5สถ ณ5ฑ5สี ณฑ5โล ณ4ฑ์ ณย5รั ณ1ร ณ4วา ณสม4 ณห5พล ณห5ภู 1ณา ณา5ปี 1ณิ 1ณี ณี5สง ณู5ปโ ด1ก ด4กง ด4กด ดก5ดื ด4กเ ด4กแ ด1ข ด1ค ดง4ค ดง5ออ ด1ช ด4ชน ด5ชนะ ด1ด ด4ดเ ด1ต ด1ท ด1ป ด1พ ดร5ลิ ด3ร้ ด1ส ด4สก ด1ห 1ดั ดัส5ต 1ดา ดา5กอ ดา5มุ ดา5รก ดา5สว ดำ5ฤษ ดิ5ทอ ดิ5ทิ ดิ4บ ดิ5วร ดิ5ศว ดี5ดี 3ดีน ดี5ฝ่ ดี5รอ ดี5ลิ ดี5วี ดี5หม ดี5หว ดู5ถู ดู5ปอ ดู5รั ดู5หม ดู5แค ด1เ ด1แ ด1โ ด้5ยิ 2ด์ ด์5สป 2ตก ตก5ร้ ต1ค 2ต1ช 2ต1ต ต4ตภ ต4ตส ต4ตโ ต5ถกะ ต3ถา ต5ถุป ต5ถุศ ตถ5เล ตทัศนูป5 2ตน ตน5ฟอ ตน5วร ต4นาธ 2ต1บ ต4บช ตบ5ชว ตป5นี ต1ภ 2ตย 4ตรก ตร5กิ ตร5งอ ตร5จี ตร5จุ 4ตรฐ ตร5ตร ตร5ทว ตร5ผล ตร5ฝร ตร5พล ตร5รง ตร5ลด ต5ริยา ต4รู 2ตร์ ตฤ5ตี ตล5รั ต1ส ต4สค ตส5วา ตส4เ ต4สแ ตส5แต ตอ5ม่ ตอ5รอ ตะ5ใภ 1ตั 1ตา ตา5กล ตา5กว ตา5นึ ตา5ปร ตา5ปล ตา5ผิ ตา5ฟู ตา3มห ตา5มอ ตา5มะ ตา5ฬี 3ติก. 
ติ5จู ติ5ช่ ติ5ทิ ติ5นร ติ5บอ ติ5มศ ติ5มส ติ5มอ ติ5ยม ติ5อิ 4ติ์ ตี5ขล ตี5คู ตี5ตื ตี5รว ตี5ลั ตุ5ตถ ตุ5ทส ตุ5ป่ ตุ5มห ตุ5รก ตุ5ลั ตุ5สด ตู5ดิ ต1เ ต3แล ต1โ ต่5ถา ต่5ว่ ต่5สว ต้5ก๋ ต้5ตอ ต้5ฝุ ต๋5เต ต์5คล ต์5ฟู ต์5ศต ถด5ถอ ถม5ถื ถล5ไถ ถว5ไม ถะ5ถั ถ4าธ ถา5วร ถ4ีย ถี5ลิ 3ถุน ถ่5ถอ ถ่5ถา ทก5ซั 4ทกา ทค5ติ ทค5นี ทด5รอ ทด5ลอ ทธ5คย ท5ธชะ ทธ5ฎี ทธ5ปฏ ทธ5พร ทธ5รั ทธ5ศต ทธ5สี ทธ5อง ท5ธิก ท5ธิช ท5ธิบ ท5ธิป ท5ธิผ ท5ธิพ ท5ธิภ ท5ธิร ท5ธิฤ ท5ธิศ ท5ธิโ ทธ5เจ ทพ5ธิ ทพ5ยุ ทฟ5ลอ 2ทย ท5ยาน ทร5คต ทร5คร ทร5ธน 3ทรร ทร5สโ ทร5หว ทร5หึ 1ทรา ท5ราก 4ท5ราห 1ทรี ทว5ทห ทว5สถ ทศ5ทิ ทศ5วร ทสน5ท ทส5โก ทห5วั ทห5ฬิ 1ทั 1ทา ทา5ฐิ ทา5ฒิ ทา5นอ ทา5มร ทา5รพ ทำ5ขว ทำ5ซ้ ทำ5ท่ ทำ5โท ทิ5ฆั ทิ5ฐิ ทิ4พ ทิ5พา ทิ5วง ที5นว ที5นี ที5รา ทุ5คต ทุ5ลั ทุ5ศี 1ทู ทู5น่ ท1เ ท์5ดอ 1ธร 4ธรส 4ธรั 1ธา ธา1รณ ธิ5ฤท ธิ5ศี ธิ5สม ธี5รี ธุ5ดง ธุ5ลี ธู5ปน น1ก น4กค น4กป นก5ยู นก5รู น1ข นข5ลิ น1ค นค5ริ น1จ น4จอ นจ5อน น1ช น4ชญ น1ซ น1ด น4ดร น1ต นต5กว น5ตกะ นต5ดิ น4ตท นต5ทิ นต5ปิ น4ตภ น5ตระ น5ตรั น3ตรา น5ต5ริ นต5ฤด น3ติ น5ตุก น5ตุฏ น4ต์ นถ5ธุ นถ5รจ นท5ขี นท5นน น5ทนะ นท5ผล นท4ย น5ทรง น5ทรล น5ทรั น5ทรุ นท5ฤก น5ทลา น5ทวย น3ทอ น1ทิ น3ที นธ5กร น5ธกะ น5ธนะ น5ธุก น5ธุร น5ธุว น5ธุศ นธ5ไม น1น น4นต น4นท น4นร นน5รา น1บ นบ5นอ น1ป น4ปจ นป5จู น4ปท น1พ นพ5ปฎ นพ5ศู นภ5ศู น5ยนต นฤ5คห นฤ5ปเ นฤ5เท นฤ5เบ น1ล น4ลล นว5ร่ น1ศ นษ5กร น1ส น4สซ น4สส นส5แด น4สโ น1ห นอ5กะ 3นอน 1นั 1นา นา4คร นา5ณั นา5ปร นา5รย นา5วต นา5วล นา5สณ นา5สน นา5สว นา5ฬิ 4นาะ 1นิ นิ5ด้ นิ5ฟอ นิ5มน นิ5ยม นิ5ยา นิ5รอ นิ5ลุ นิ5วร นิ5สง นิ5สถ นิ5สี นิ5แด นี5มี นี5มู 1นุ นุ5พย 1นู 2น1เ น1แ น1โ น1ไ น่5อี 3น้อ 1น้ำ น์5สไ บ1ก บ4กษ บกิส5 บ4กแ บ1ข บ1ค บ4คท บค5ที บ4คโ 1บดี บ1ท บบ5ฉบ บบ5ฝึ บบ5อย บ1ป บ1พ บร5มี บ1ส บ4สบ บส4เ บ1ห บอ5ดี บอ5ระ 3บอล 1บั บัพพาชนี5 1บา บาจ5ร บา4ต บา5ตอ บา2ท บา5ทา บา5ทุ บา5รน บา5รอ บา5สม บี5คิ บี5ร่ 1บุ บุค3 บุ5ตร บุ5ถุ บุ5รพ 1บู บู4ช5น บู5ติ บ1เ บ1แ บ1โ บ๊5จี บ๊5เบ ปก4ส ป4จั ป4จา ปฐ5ปท ปฐ5พี ปต5ถก ปต5พล ป4ทา ป1ป ป4ปเ ปม5ด้ ป4ยุ ปร5ตอ ปร5ตี ปร5ตุ ปร5ผั ปร5ษณ 1ประ ปร5แก ปร5แท ปล5ญว ป4วา ปส4ต 1ปั 1ปา ปา5ฐก ปา5ณก ปา5นี ปา5ปิ ปาร4 ปา5รเ ปิ5ดอ ปิ5ยภ ปิ5ยอ ปิ5หก ปี5ชี ปี5มะ ปี5ฬก ปี่3 ปุ5คล ปุ5ถุ ปู5จ๋ ปู5ติ ป1เ ป1ไ ผก5ผั ผณิ5ศ ผน5ผั ผ4นิ ผ4ยา ผล5พล ผล5ไม ผ4สา ผี5ดิ ผี5ตอ ผี5ถ้ ผี5ห่ ผ้า3 3ฝอย ฝี5มะ ฝ่5ฝั 3พจน พจ5นี พช5ฉล พช5รา พทัก4 พน5ทะ พ4นั พนิ4 พ1พ 2พ2ย พย5ก๊ พร5ชย พร5ซี พร5มี 1พรร พ4รู 3พฤก พฤ5ฒา พล5ทิ พล5ร่ พส5เฟ พอ5คว พอ5สม 1พั 1พา 4พาจ พา5ชน พา5นร 1พิ พิ5ถั พิ5ถี พิ5ปล พิ5รอ พิ5รี พิ5ลึ พิ5ศุ พิส5ม พี5ระ พุ5ชิ พุ5พอ พู5ทว พู5พอ พ1เ พ4เย พ่5ป๊ พ่อ3 พ้5ท้ 2พ์ พ์5ดี 1ฟั 1ฟา ฟิ4ลา ฟี5ฟ่ ฟู5ฟ่ ฟ1เ 1ฟ้ ภค5ทร ภค3ว ภช5นี 1ภั 1ภา ภา5ณก ภา5ณว ภา5รด ภา5รต ภา5รย ภา5วน ภิ5ชน ภิ5มห ภิ3ร ภิ5สม ภิ5ไธ ภุ5ชง 1ภู ภู5ฏา ภู5ริ ม1ก ม4กม ม4กษ ม1ข ม4ขล ม1ค ม4คค ม4คอ มค5อิ 1มงคล มง5ฟอ ม1จ ม1ช มช4ว ม1ซ 3มณฑ มณ5ฑน มณ5บร มณ5พร มณ5เฑ มด5ยอ มด5ลู ม1ต ม4ตธ ม4ติ ม4ตไ มต5ไต ม1ท 3มนตร มน5ฮั ม4นุ ม1บ มบ4พ ม1ป มป4ช มป5ฤด มป5ฤๅ ม4ป์ ม1พ ม4พก ม4พว ม1ภ มภ5กถ ม1ม ม4มเ ม4มโ มย5รา 3มรร ม3รั ม3ริ มฤ5คิ มฤ5เค มล5ทิ ม3ลิ ม3ล้ ม1ว มว5มอ ม4วล ม1ส มส4เ มห5กร ม3หน มห5ภา ม5หาญ ม5หาย มหา3ส มอ5ขว มอ5คร มอ5ดู มอ5ตำ มอ5นว มอ5นอ มอ5ระ 4มอั มะ5ถั มะ5ฝ่ มะ5ฮอ 1มั ม4ั่ 1มา มา4ก มา5ดร มา5นร มา5ป่ มา5พจ มา5มก มา5มุ มา5ม่ มา5ยณ มา5ยอ มา5ร่ มา3ว4 1มิ มิ5กภ มิ5ชิ มิ5ซร มิ5ตล มิ5ถิ มิ5น่ มิ5ฟล มิ5ลำ มิ5แพ มี5ขม มี5หน 3มืด 1มือ. 
มุ5ทะ มุ5ทั มุ5ทิ มุ5ทุ มุ5ฮั มู5ซี มู5ป่ มู5รต มู5ลิ มู5หย มู5หร มู5ฮั มู5แด มู5แผ มู5แฮ ม1เ ม1แ ม1โ ม1ไ ม4่า 3ม้า ม์5ภิ ยก5ย่ ย1กร ย4ก5ร้ ย1ค ยง5บ่ ยง5ฝ้ ยง5อย ยจ5คร ยด5ย้ ย1ต ย1ท ย1ธ ยบ5ร้ ย1ป ย1พ ย1ภ ยม5ยอ ยม5รา ยม5หา ยม5อี ย4มิ ย1ย ยย4ส ยร5ถี ย5รบั ยล5ไท ยว5ข้ ยว5จ๊ ยว5ดอ ยว5นี ยว5ย่ ยว5รั ยว5ไส ย1ศ ย1ส ย1ห ย4หฐ ยห5ฐา ย4หป ยห5ปร ยอ5บี ยอ5รม 1ยั 1ยา ยา5กฤ ยา5กว ยา5ฉุ ยา5ณม ยา5ณว ยา5ถ่ ยา5บร ยา5สล ยา5สี ยา5ฬั ยำ5ทว ยี5รา 1ยุ ยุ5คล ยุ5ตก 4ยุภ ยุ5แย ยุ5แห ยู5ถิ ยู5ฟ่ ยู5ริ ยู5ไน ย1เ ย1แ ย1โ ย์5กล ย์5ถ่ ย์5มน ย์5หน 2รก รก5ซอ รก5ซึ รก5ซ้ ร1กร รก5รา รก5ร้ รค5พว รง5พย รง5รอ รจ5ถร รณ5คด รณ5ตร รณ5ถั รณ5พฤ รณ5สถ ร5ณาญ รณู5ป 4รณ์ ร1ด ร4ดป ร4ดแ ร4ดโ ร4ดไ รด5ไอ รถ1 รถ5พย ร1ท ร4ทฤ รท5ฤด ร4ท4ว รท5วิ รธ5ขึ รธ5สร รธ5เก รน5ทุ 4รนา ร1บ ร4บค ร4บถ รบ5ถ้ ร4บม ร4บั ร4บไ รบ5ไก ร1ป ร4ปณ ร5พชา ร5พชิ รพ5ทิ ร1ภ ร4ภย รม5รอ รมาว5 รม4เห ร4ยั รร4ก รร5คา รร5จถ รร5จว รร5ชิ รร5ณึ รร5ถา รร5ยง รร5ยเ รร3ล รร5หา รร5แท รร5แส รร5ไก รร5ไต รศ5นี รษ5ตร ร1ส ร4สก ร4สช ร4สเ ร4สโ ร3หิ ระ1 ระ5สา ระ5หก 5รังส 3รัฐ 1รัต รา5กฏ รา5กฤ รา5กว 1ราช รา5ชู รา5ดร รา5ดว รา5ดู รา5ม่ รา5วณ รา5สง รา2ห รา5หุ รำ5งั รำ5จว ริ5ซึ ริ5ตร ริ5ทึ 4ริพ ริ5มน 4ริยจ 4ริยย 4ริร ริ5อ่ ริ5แล 4ริ่ รี5คู รี5ฑา รี5ดู รี5ตร รี5ตอ รี5มู รี5รั รี5รา รี5ริ รี5ลั รี5ลิ รี5ล่ รี5สอ รี5สะ รุ5กว รุ5ขร รุ5คร รุ5ทว รุ5ธิ รุ5มุ รุ5วน 1รู รู5ที รู5นี รู5บิ รูป5ก รู5ปิ รู5มา รู5มู รู5หร 2ร1เ ร1โ ร่5กะ ร่5ตร ร่5ร่ ร่5หล ร์5กิ ร์5กี ร์5ติ ร์5ตี ร์5ตู ร์5ทิ ร์5ฟอ ร์5ฟู ร์5ลี ร์5วอ ฤ4ดา ฤป4เ ฤษ5ฎี ฤห5บด ล5กนะ ลก5ลา ลก5วั ล3กอ ล4กัย ลข5คณ ลข5หม ลชี4 ลด5ระ ลด5ลิ ล4ดา ล1ต ล4ตฟ ลต5ฟอ ลบ5ตะ ลบ5มุ ลบ5ล้ ลบ5ไส ลป5ตอ ลม5งว 2ลย ล1ล ล4ล์ ล3วี ลว5ไห ลส5ไต ลห5กุ ลอก5ล ลอ5จี ลอ5สร ละ5ผล 1ลักษ ลา5กล ลา5นี ลา5ป๋ ลา5พอ ลา5มี 3ลาร ลา5รอ ลา5ร้ ลา5ฤก ลา5ส้ ลิ5ก่ ลิ5จู ลิ5ซึ ลิ5ตอ ลิ5นอ ลิ5น่ ลิ4บ ลิ5บา ลิ5ฟอ ลิ5มู ลิ5อิ ลิ5ไท ลิ5ไล ลี5ตะ 3ลีน ลี5ผล ลี5ลา ลี5วู ลุก5ร ลุก5ล ลุ5ล่ ลูก1 ลู5ที ลู5มิ ลู5ลอ ลู5ออ 2ล1เ 2ล1แ ล1โ ล่5ติ ล่5ที ล่5หล ล่5ออ ล้5โพ ล์5สต ว3กร วก5ว่ ว5การ ว1ค 1วงศ วจ5ตร วจ5สอ วช5นี วด5ถ่ วด5มว วด5ยิ วด5ระ วด5ลา วด5ล้ วด5อ้ ว1ต ว4ตฉ วน5ถี วน5ท้ วน5ผส วน5รว วน5ร่ วน5อิ วบ5ยอ วบ5รว วบ5รั วบ5ฮา ว1ป ว1พ วม5รอ ว3มู วย5กิ วย5ก้ วย5จี วย5ริ วย5รื วย5ล้ วย5ไท วย5ไม วร5ธิ วร5มณ วร5มห ว4รย 1วรร4 ว4ร์ วล5ระ ว1ห วอ5ชิ 1วั วัน3 วันต5 วันท4 1วา วา4ต วา5ตก วา5ติ วา5นร วา5นึ วา5บร วา5มน วา5รณ วา5สนะ วา4ห วา5หน วา5หิ 1วิ วิ5กล วิ5กส วิ5คห วิ5จุ วิ5ดี วิ5ตก วิ5ตร วิ5ตี วิ5ถี 3วิท วิ5ทิ วิ5ธุ วิ5ธู วิ5ปก วิ5ปฏ วิ5ปล วิ5ปว วิภู5 วิ5มล วิ5รง วิ5วร วิ5ศร วิ5ศุ วิ5ษุ วิ5สร วิ5สฤ วิเล5 วิ5ไล วี5คู วี5ชน วี5ดิ 1วุ ว1เ ว1แ ว1โ ว่5ห้ ว้5ชื ว้5ทุ ว้5ลา ว์5ลิ ศ1จ ศพิ4 3ศรี ศ2วร ศษ5ซ้ ศษ5เก ศษ5เห 1ศั ศัก5ร 1ศา2 ศา5กา ศา5ขบ ศา5นุ ศา5ภิ ศา5รย ศา5รั ศา5ริ ศา5ลา 1ศิ ศิ5รพ ศิ5รว ศิ5ศี 1ศึ ศุ5กล ศู5ลิ ศเจ5ร ษ3ฎา ษฐ5ภค ษ5มณี ษ4มา 1ษั 1ษา ษา5คเ 1ษิ ษ์5พย สก5ลิ สก5ลึ สก5วั สก5วา ส4กา 4ส4กุ สข5บุ สง5ขล ส1ซ ส5ดิก ส5ดิน ส5ดิภ ส5ดิม สต5ทิ ส3ตรา 2สต์ สถ5วี 4สถ์ สน5ธย สน5ธิ ส5นียะ ส4นุ สนูป5 ส4ปา สพ5ติ ส2ม สม5ดุ 3สมบ สม5ผส สม5ผุ สม5ผเ สม5ยอ สม5ฤด สม5ฤต สม5หว ส5มัท ส5มัน สมุ4 สรร5ช สร5ลอ สล5บร สว4ก สว5ยม ส4วร สว5ริ ส4วา 4สวิ ส1ส สห5กร สห5กา สห5ชา สห5ธร สห5ปร สห5พั สห5ภา สห5รา สห5ศึ สอ5พล สอ5พอ สะ5ใภ 1สั สัญประ5 สัน3ถ สัม3 1สา สา5กิ สา5คเ 4สาธ สา5นึ สา5มน สา5มี สา5วพ สำ5ออ สำ5โร 1สิ สิ5ถิ สี5ข้ สี5ชอ สี5ดว สี5ตล สี5ตโ สี5ถ่ สี5ผึ สี5ฝุ สี5ละ สี5ลั สี5วล 1สุ สุ5กร สุ5กำ สุ5กี สุ5ขิ สุ5ขุ สุ5คต สุ5คร สุ5นี สุ5บร สุ5บิ สุ5ปร สุ5มน สุ5สง สุ5ไห 2สุ์ 1สู ส1เ ส4เฟ ส1โ ส4โก ส4โค 3ส่ว ส่5ไค ส้5กร ส้5ติ ส้5ไก 2ส์ ส์5หย ห2 2ห1ก หก5ระ หก5ล้ 5หการ หง4ส หง5สา หฤ5หร หฤ5โห หล5สะ หอ5คอ หอ5สม 1หั หา5กฐ หา5บพ หา5ปณ หา5พร หา5รื หา5ฤก หิ5รก หิ5ศว หุ5คู หู5กร หู5กว หู5หน ห้5ท่ ห้5ท้ ห้5ร้ 2ห์ ห์5กร ห์5สน ฬว5รา ฬห5บู 1ฬา ฬา5มณ ฬา5รึ อก5ซอ อก5ซั อก5ถล อก5รณ 
อก5รี อก5รู อก5ร่ อก5ฤท อก5ลว อก5ลอ อก5ลา อก5ล่ อก5ว่ อก5ใบ อค5ที อฆ5สง อง4คม อง5ถิ อง5บร อง5บิ อง5ฟอ อง5ฟุ อง5ระ อง5อ้ อด5ถอ อด5น่ อด5ฝา อด5ยอ อด5รั อด5อย อด5ออ อด5อ้ อ3ดิ อต5ดอ อต5ด็ อ1ท อน5ง้ อน5ดร อน5ทำ อน5ผั อน5ฝู อน5ฟิ อน5ย้ อน5รา อ4นา อ4นุ1 อบ5ช้ อบ5ถา อบ5บี อบ5อว อบ5อ้ อบ5ไล อป5กิ อป5ติ อป5พร อป5พล อป4ร อป5วา อป5โล อพ5ริ อฟ5ฟิ อฟ5ฟี อฟ5ริ อฟ5ไล อ4ภั อม5ฎอ อม5ดอ อม5ถอ อม5ยิ อม5รา อม5ร่ อม5ฤต อม5หล อม5หว อม5ห้ อ5มอน อย5กอ อย5ก๋ อย5นว อย5ร่ อย5ร้ อย5อิ อ4ยา อย5ได อร5ชุ อร5มน อ3รั อ3รา อ1ริ อ1รี อ3ร้ อร์1 อล5จี อล5ซั อล5นี อล5ฟ่ อล5มอ อล5หม อ3ลั อ1ลิ อว5รุ อศ5กร อษ5ฐช อษ5ฐภ อส5กา อส5ติ อส5นี อส5พล อส5ฟอ อส5มิ อส5เฟ อส5แอ อส5ไพ อ1ห 3ออน ออ5อว อะ5ธี 1อั 1อา อา5ค5เ อา5ฏา อา5ณั อา5ดุ อา5ดู อา2ต อา5ถร อา5นน อา5ปณ อา5มล อา5ย5ต อา5รด อา5รต อา5รบ อา3รย อา5ลป อา5วร อา5วี อา5สว อำ5ยว อำ5อว อิ5ชย อิ5ดะ อิ5ระ อิ5ศว อี5จู อี5ซู อี5ยิ อี5รุ อี5ลุ อี5ศว อี5หร 1อุ อุ5กฤ อุ5กล อุ5คร อุ5ดม อุ5ดร อุ5ด้ อุ3ตรา อุ5ตุ อุ5ทร อุ5ทิ อุ5ทุ อุ5ธั อุ5บล อุ5บ๊ อุ5มง อุ5รพ อุ5ลก อุ5แว อู5คู อู5รา อู5ลา อ1เ อเป5ร อเสก5 อเส5ข อเห5ต อ1แ อ1โ อโร3 อ1ไ 3อ่อ อ่5อว อ่5อ่ อ่5โถ อ้5อว อ้5โถ อ้5โล ฮก5ฮา ฮก5ฮื ฮน5รี ฮฟ5วี ฮล5ซิ ฮล5ดิ 3ฮอล ฮา5นอ ฮา5ป่ ฮิ5บร ฮี5บร 3ฮื้ ฮู5ลา ฮู5ล่ ฮ1เ ฮ่5กึ ะ1ก ะ1ข ะ1ค ะ1ง ะ1จ ะ1ฉ ะ1ช ะ1ซ ะ1ด ะ1ต ะตะ4 ะ1ท ะ1น ะ1บ ะ1ป ะผี4 ะ1พ ะ1ม ะ1ย ะ1ร ะ1ล ะ1ว ะ1ส ะ1ห ะ1อ ะ1เ ะ1แ ะ1โ ะ1ไ ั2 ัก5ง่ ัก5ซ้ ัก5ตบ ัก5ผ่ ัก5ฝ่ ัก5ยอ ัก5ยิ ัก5รั ัก5ร้ ัก3ล ัก5วิ ัก5ษร ัก5อิ ัก5อี ัก5อ่ ัก5ใค ัก5ใฝ ัค5ฆิ ัค5ซี ัค5สถ ัง5ถึ ัง5ศุ ัง4ส5ว ัง5อว ัง5ฮี ัจ5กล ัจ5ญะ ัจ5ถร ัจ5นึ ัจ5โจ ัช5ฎา ัช5นี ัช5พย ัช5พื ัช5รา ัช5ริ ัช5สม ัช5เร ัช5แพ ัช5โญ ัญ1 ัฏ5ทุ ัฏ5สง ัฐ5ทิ ัฐ5บร ัฐ5สภ ัฐ5เค ัณ5ฏก ัณ3ฐ ัณ5ยก ัณ5เฑ ัณ5โร ัด1 ัต5ดึ ัต5ถล ัต5ถั ัต5ถิ ัต5มณ ัต5มห ัต5รา ัต5รี ัต5ฤก ัต5ลั ัต5หล ัต5หี ัท5คี ัท5ทว ัท5ธน ัท5ธิ ัท5รา ัท5ลี ัท5ลุ ัธ5ยม ัน5ฉ่ ัน2ต ัน5ตภ ัน5ตะ ัน5ตั ัน5ตา ัน5ถธ ัน5ทึ ัน5ทุ ัน5ท่ ัน4ธ ัน5ธา ัน5ธิ ัน5ผว ัน5ฝร ัน5ฝ่ ัน5ภิ ัน5ยะ ัน5ย่ ับ1 ัป5คั ัป5ผา ัป4ร ัป5ลา ัป5หง ัป5โป ัป5โห ัพ5ยอ ัพ5ยา ัพ5โพ ัพ5โห ัฟ5ฟิ ัฟ5ริ ัม4ช ัม5ลา ัม5หม ัย5มร ัย5รุ ัล5ดี ัล5ปน ัล5ปพ ัล5ปิ ัล5ฟิ ัล5มอ ัล5มุ ัล5ออ ัล5ไซ ัล5ไฟ ัว1 ัศ5นี ัศ5มี ัศ5เจ ัส5กา ัส5ดง ัส5ดน ัส5ดี ัส5ติ ัส5ถา ัส5ปู ัส5มั ัส5มิ ัส5ยิ ัส5รั ัส5ลิ ัส5วด ัส5วร าก5ถา าก5ฝร าก5ฝั า1กร า5กรร าก5รุ าก5ฮอ า3กี า1ข า4ขบ าข5บู า1ค า4คจ า4คท า4คบ า4คป าค5ปร า4คพ าค5พื า4คภ า5ครี าง5บำ าง5ฝี าง5ฟิ าง5ออ าง5อิ า1จ า4จญ า4จห าจ5หา า4จอ า4จเ าช5กร าช5คร าช5คฤ าช5ทิ า5ชนะ าช5นี าช5ปะ าช5ลั าช5วโ าช5สก าช5สี าช5อง า1ชิ า3ชี าช5เป าช5เล าช5โอ า1ซ าญ5รอ า5ฏกะ าฏ5ดน า5ฏลิ าฏ5ลี า3ฏิ าฐ5กถ าณ5คด าณ5สถ าด5ผว า3ดอ า3ดิ าด5ไท าด5ไห า1ต า4ตญ า4ตภ าต4ว า1ท า4ทธ า4ทน า5ทนะ าท5บง าท5บร าท5สก าท5หล า4ท์ า1ธ า4ธน า2ธย าธ5ยม าน5ญ่ าน5ผู าน5รว าน5รั าน5รา าน5ฤด าน5อว านุ1 าบ5จ้ าบ5ฉว าบ5ช้ าบ5ซึ าบ4พ าบ5รื าบ5ละ า3บิ าป5สร าป5ส่ าป5แช าพ5ถ่ าพ5ยน าพ5รั าพ5ลว าฟ5ต้ าฟ5ริ า3ฟิ า1ภ า4ภป า4ภล าภ5ลอ าม5ง่ าม4น4 าม5สก าม2ห าม5หม าม5หล าม5หา าย5กล าย5กอ าย5ขว าย5ข้ าย5ชน าย5ดิ าย5ด้ า5ยตน า5ยนธ า5ยนม าย5นอ า5ยนเ าย5บร าย5ผอ าย5ฝั าย5มุ าย5ม่ าย5รุ าย5ร้ าย5ลั าย5ล่ าย5วอ าย5อำ า3ยิ าย5ไห าร5กำ าร3ค าร5ชุ าร5ณู าร5ตร า5รทะ าร5ธุ าร5บั าร5ผจ าร5พร า5รภย า1รม าร5รา าร5ละ าร5วด าร5ว่ าร5หน า1ระ า1รั า1รา า1ริ า5ริก า5ริยะ า3รี า1รุ า1ล า4ลก าล5กิ า4ลค า4ลจ าล5ฎี า4ลด าล5ดี าล5ทห า4ลป าล5ปก าล5พร า4ลว า4ลโ า4ล์ าว5ก่ าว5ข้ า3วดี าว5ดึ าว5นี าว5บอ าว5ยอ าว5ยื า5วรณ าว5รภ าว5รา า5ว5รี าว5รุ าว5ร้ าว5ฤก า5วอน าศ5นี า3ศร าศ5เล าษ5ดื าษ5ตร าษ5รา าษ5แก าส5กา าส5ด้ าส5ต้ าส5นี าส5ปอ าส5มห า1ห าห3ก าห5มง าฬ5โร า1อ าอนา4 า1ฮ า1เ าเม5ศ า1แ า1โ า1ไ ำ1ก ำ1ค ำท4ว ำ1น ำ1บ ำ1ป ำ1พ ำ1ม ำม5รง ำม5ลา ำ1ร ำ1ล ำ1ส ำ1ห ำ1เ ำ1แ ิก5ซี ิก5ถอ ิ1กร ิก5ร้ ิ3กฤ ิก5ล้ ิก5วา ิก5ษุ ิ3กิ ิกิ5ส ิ1ข ิ4ขส ิข5สิ ิ1ค ิ4คต ิค5ตอ ิ4คหะ ิฆ5เน ิง5ชี ิง4สต 
ิง4ห ิง5หา ิง5ห้ ิง5อร ิจ5ศี ิช4น ิช5ลิ ิช5เช ิญ5หน ิญ5โญ ิด5ฉิ ิด5นี ิด5ผน ิด5รอ ิด5ระ ิด5ลั ิด5ออ ิด5อ่ ิต5ซู ิต5ถี ิต5ฟอ ิต5ลด ิต5ลา ิต5วส ิ1ติ ิ3ตุ ิท5ธั ิท5สน ิ3ธี ิน5งอ ิน5ฟร ิน5ยว ิน5ยอ ิน5ย้ ิน5ระ ิน5ริ ิน5ร้ ิ5นอบ ิน5อิ ิน5ฮุ ินู5ป ิบ5บิ ิบ5ผย ิบ5ยื ิบ5ระ ิบ5รี ิบ5ลั ิบ5ลิ ิบ5ล้ ิป4ก ิป5ซั ิป5ทอ ิป5ผล ิ3ปร ิป5สต ิป5ฮอ ิป5โป ิป5โย ิ1พ ิ4พพ ิ4พโ ิพ5โส ิฟ5ฟอ ิ1ภ ิม5ฝี ิม5ลา ิ1มุ ิย5มิ ิร5ชร ิร5วด ิ1รั ิ1รา ิ1ริ ิ1รุ ิล5ปิ ิ1ลั ิ1ลา ิ1ลิ ิว5กิ ิว5ซี ิว5ทร ิว5บิ ิว5ยอ ิว5ริ ิว5ลิ ิว5ลึ ิวา5ส ิศ5พร ิศ5ร้ ิศ5เล ิศ5แพ ิษ5ณุ ิษ5ตร ิส5กร ิส5กี ิ5สตร ิส5ติ ิส5ที ิส5นี ิส5บอ ิส5รา ิส5ริ ิส5ลา ิส5ไซ ิ1ห ิหา4 ิ1เ ิเน4 ิ1โ ี1ก ี4กต ี4กย ีก5ย่ ีก5ริ ีฆ5สร ีช5คณ ีซ5สถ ีด5ฆ่ ีต5กว ีต5ปฏ ี1ท ีท4น ีบ5รั ีบ5รุ ีบ5ร้ ี1ป ี1พ ี4พจ ีย5กถ ีย5รย ีย5รอ ีย5ระ ีย5รั ี5ยวน ีร5ณั ีล5จุ ี4วั ีวา4 ีษ5มา ีห5นา ี5หน้ ีห5บั ีห5มุ ีห5รา ี3หล ีห5โม ีห5ไส ี1อ ีอ4ร ีอา4 ี1เ ี1แ ี1โ ี1ไ ี่5ก่ ี่5ถ้ ี่5ปุ ี่5ปู ี่3ห ี่5โค ี่5โป ี้5กร ี้5จ้ ี้5ซั ี้5ตะ ี้5ริ ี้5ลั ี้5ลุ ี๊5กร ี๋5จ้ ี๋5อ๋ ึก5ซึ ึก5ดำ ึก5ดื ึก5ยื ึก5ระ ึก5ลั ึก5ล้ ึก5ฮั ึด5ถื ึด5ฮั ึน5ทึ ืด5ฮา ือ5กล ือ5กอ ือ5กำ ือ5ข่ ือ5จ้ ือ5ชื ือ5ดำ ือ5ตร ือ5ถื ือ5นำ ือ5บิ ือ5ปล ือ5ปื ือ5ป่ ือ5พว ือ5พ่ ือ5ยน ือ5ยา ือ5รื ือ5ลา ือ5ล้ ือ5สอ ือ5สำ ือ5อี ุก5งอ ุก5ฉก ุก5ซ่ ุก5ดิ ุก5ผา ุก5รา ุก5รุ ุก5ละ ุก5ลี ุก5ล้ ุก5อี ุก5ฮื ุข5นา ุข5ปา ุข5ภั ุข5ภา ุข5ลั ุข5ศา ุข5ศึ ุข5เด ุค5ทอ ุ3คน ุง5ถุ ุจ5ลิ ุจ5หน ุช5รา ุช5เช ุญ5จน ุญ5ฤท ุญ5แจ ุฎ5ฐั ุฑ5พ่ ุณ5ค่ ุณ5ฑก ุณสม5 ุณ5หญ ุณ5หา ุณ5หิ ุณูป5 ุด5ผา ุด5ผ่ ุด5ลอ ุด5ลุ ุด5อู ุต5กว ุต5กิ ุต5ซอ ุต5ตก ุ5ตระ ุ5ตริ ุต5ลุ ุ3ทก ุท5ธั ุ5ทริ ุท5ลุ ุท5โธ ุน5ทร ุน5ผล ุน5รอ ุบ5งิ ุบ5ซิ ุบ5บิ ุบ5ผล ุบ5ยิ ุบ5อิ ุป5กร ุป5กิ ุป5จา ุป5ถั ุป5ทา ุป5ยุ ุป3รา ุ5ปริ ุ4ปส ุป5สง ุป5สร ุป5ฮา ุป5โภ ุป5โล ุพ5พา ุพ5ภิ ุภ5ชล ุภ5เค ุม4น ุม5นุ ุม5รุ ุม5หย ุย5ช่ ุย5ฝ้ ุ1ร ุร5ข่ ุ4รค ุ4รฉ ุ4รช ุ4รท ุ4รธ ุ4รบ ุ4รพ ุ4รภ ุ5รภี ุ4รย ุ4รร ุ4รล ุ4รว ุ4รศ ุ4รส ุ4รอ ุ4รแ ุ4รโ ุล5จอ ุล5ชี ุล5ธิ ุล5มุ ุล5สต ุล5สแ ุ3ลา ุ3ลิ ุศ5โล ุษ5จี ุษ5ฎี ุษ5ปร ุ4ษย ุษ5รา ุษ5ร้ ุษ5เพ ุส5รา ุ5สละ ุส5ลิ ุส5วา ุ1ห ุห5กล ุห5นา ุ4หย ุห5ยา ุ4หเ ุห5เท ุห5เส ุ4หโ ุห5โย ุ1เ ุ1โ ุ๊5ต๊ ูก5วั ู1ช ูญ5หา ูญ5เป ูญ5เส ูด5บึ ูด5รี ูต5รู ูธ5เร ูบ5ไล ูป4ก ูป5ฌา ูป5ถ่ ูป5ทร ูป5พร ูป5ร่ ูป5แบ ูป5โฉ ูฟ5วี ู2ม ู5มิน ูร5ข่ ูร4ณ ู5รณภ ู5รณม ู5รณะ ู5รณาก ูร4พ ู5รพะ ู5รพา ูร4ม ูล5กร ูล5ค่ ู3ลั ูส4ว ู1เ ู1โ ู่1 ู้1 ู๊5ตึ ู๋5กร ู๋5จี ู๋5อี เ2 เก5ยู เก5วั เก5ศว เก5อิ เค5ซอ เค5มี เค5ศว เจ5ดี เจ5นี เ4จร เจ5ลิ เจ5โต เซ5ซั เซ5ทิ เซ5นอ เซ5รุ เซ5แค เด5ซิ เด5บิ เด5รั เด5ลา เด5ลิ เด5ลี 2เตช เต5ปุ เต5มี เต5มู เต5ริ เต5ลุ เต5ศว เต5หะ เถ5รา เท5กร เท5คร เท5คว เท5โว เท5โศ เน4ต เน5ติ 4เนย เน5ระ เน5รั เน4ส เน5สา เน5เว เบ5ต้ เบ5บี เบ5ริ เบ5รุ เบ5ลี เป5ตอ เป5สก เป5สล เพ5ชุ เพ5ทุ เพ5สล เพ5โท เฟ5อี เภ5ตร เภ5ทุ เม5ฆิ เม5ดิ เม5ลา เร5กะ เร5ซิ เร5มอ เร5รว เร5วด เล5กร เล5คอ เล5ดี เล5วร เล5วู เล5หล เล5ฮุ เลิ4 เว5ก้ เว5ทิ เว5ฬุ เส5ฉว เส5นีย์ เส5รี เส5วก เส5วน เส5แส เห5มั เห5ยง เห5ระ เห5รั เห5ศว เห5ศั เห5สั เฬ5วร เอ5กว เอ5ซิ เอ5ธิ เอ5ฬก เฮ5ละ เฮ5ลิ เฮ5โม เฮ5โร แก5วั แค5รอ แค5ริ แค5ลอ แค5ลิ แค5แต แค5แส แช5บ๊ แช5เช แซ5ยิ แด5รี แต5แต แน2 แป5ซิ แ4ปร 3แพท แฟ5รี แ4ฟ้ แม2 แม5ชี แม5รี แม5เร แม่3 แอ5นะ โก4ฐ โก5ลอ โก5ลา โก5ลิ โก5วา โก5วี โก5ฮา โข5ทั โข5ภิ โข5เภ โข5โล โค5ตม โค5ติ โค5มู โค5ม่ โค5ริ โค5ลอ โค5ลั โค5ออ โค5อะ โค5แท โค5ไซ โจ5ปก โฉ5เบ โช5ดึ โช5ห่ โซ5กร โซ5นี โซ5ฟิ โซ5ยู โซ5ลู โซ5สเ โญ4ช โญ5ปว โด5จี โด5นี โด5รา โด5ลิ โต5กร โต5รอ โต5รา โต5ริ โต5ลิ โท5กร โท5คอ โท5พล โท5รอ โท5แอ โธ5ทน โธ5ปก โธ5วน โธ5เฟ โน5ทุ โน5ปจ โน5รม โน5รา โบ5ชุ โบ5ซอ โบ5ต้ โบ5รอ โบ5รั โบ5รา โบ5ลิ โบ5ล่ โบ5ไฮ โป5กส โป5ลิ โป5แล โป5โป โป5โล โพ5ซิ โพ5ทะ โพ5ระ โพ5ลา โพ5ลิ โพ5ลี โพ5หา โพ5แท โพ5ไซ โฟ5กร โฟ5นี โภ5คิ โภ5ไค โม5ฆี โม5ดู โม5ร็ โม5หา โย5ถิ โร5กะ โร5คิ โร5งั โร5ชิ โร5ธนะ โร5รา โร5ล่ 
โรส4 โร5สเ โร5หน โร5อี โร5ฮิ โร5แม โร5ไล โล5กร โล5กิ โล5กี โล5จน โล5ปุ โล5มก โล5มอ โล5รา โล5วะ โล5หิ โว5นอ โศ5ธน โศ5ภิ โส5กร โส5ติ โส5ธน โส5ภิ โส5ลิ โส5วร โส5หุ โส5โค โห5ฐา โห5รส โห5ระ โห5รา โห5สิ โห5ฬา โอ5กิ โอ5คล โอ5ค็ โอ5ดี โอ5รส โอ5ละ โอ5สถ โอ5อิ โฮ5โล 3ใช้ 1ให ไก5ลา ไก5วั ไข5ข้ ไข5คว ไข5มั ไข5สั ไข5สื ไค5ศว ไช5น่ ไช5ศว ไซ5ดอ ไซ5บอ ไซ5บี ไซ5ปร ได5ฟุ ได5ฟู ได5ลิ ได5ออ ไท5ฟอ ไท5รอ ไท5แท 3ไนย ไป5ริ ไพ5ชย ไพ5ธอ ไพ5รั ไพ5ริ ไพ5ลิ ไพ5หา ไพ5โร ไพ5โอ ไฟ5แช ไฟ5แน ไภ5ริ ไม5ถิ ไม้1 ไร5ตี ไล5บร ไล5บี ไว5กิ ไว5รั ไว5อะ ไห5รณ ไห5ศว ไห5หม ไห5หล ไอ5กร ไอ5ซี ไอ5ดอ ไอ5ติ ไอ5พอ ไอ5พ็ ไอ5ศว ไอ5ศุ ไอ5ศู ไฮ1 ็ก5ซั ็ก5ซี ็จ5ขบ ็จ5สร ็ด5ลอ ็ด5อร ็ด5อึ ็น5ฉ่ ็น5ทร ็น5รอ ็น5วู ็น5อย ็น5อ้ ็บ5ด้ ็ป5ท็ ็ม5หม ่ก5ลั ่1ค ่ง5ริ ่ง5อร ่ง5อำ ่ง5อ่ ่4ฉี ่น5ง่ ่น5ฉ่ ่น5ทะ ่น5มื ่4นย ่น5ยน ่น5ย่ ่น5รม ่ม1 ่ม5พว ่ย5กะ ่ย5ฉุ ่ย5รา ่ย5ร่ ่ว5ช้ ่ว5ถึ ่ว5ยว ่ว5ไห ่อ5กร ่อ5กว ่อ5กะ ่อ5กี ่อ5ก้ ่อ5ข่ ่อ5ตร ่อ5ตะ ่อ5ต้ ่อ5ถื ่อ5บื ่อ5ผส ่อ5มว ่อ5ม่ ่อย3 ่อ5ยอ ่อ5ย่ ่อ5รอ ่อ5ร่ ่อ3ล ่อ5ว่ ่อ5สร ่อ5ฮั ่อ5ฮ่ ่า5กล ่า5ช้ ่า5ดง ่า5ด้ ่า5ฝื ่า5พร ่า5มง ่า5รึ ่า5ร้ ่าว3 ่ำ5ชอ ่ำ5ช้ ่ำ5ต้ ่ำ5ต๊ ่ำ5ไห ่1เ ่1แ ้ก5อ้ ้ง5ถ่ ้ง5ฝุ ้น5งู ้น5ฉบ ้น5ฉ่ ้น5ทะ ้น5ทุ ้น5ท้ ้น5รุ ้น5ร่ ้ม5งว ้ม5ฉุ ้ม5น้ ้ม5ยิ ้ม5ละ ้ม5ลุ ้ม5อล ้ย5กล ้ย5งช ้ย5ล่ ้ย5อ้ ้ย5ใบ ้ว5รอ ้1ห ้อ5กร ้อ5กล ้อ5คร ้อ5คู ้อ5งอ ้อ5ฉี ้อ5ดึ ้อ5ด้ ้อ5ต๊ ้อ5ถอ ้อน3 ้อ5ผ้ ้อ5ฝั ้อ5ฟื ้อ5มู ้อ5ระ ้อ5ร่ ้อ5อึ ้อ5ฮื ้า5จอ ้า5ชื ้า5ชู ้า5ช่ ้า5ช้ ้า5ดี ้า5ถิ ้า5ถึ ้า5บ่ ้า5บ้ ้า5บ๋ ้า5ปี ้า5ผา ้า5ฝร ้า3พ ้า5มุ ้า5ว่ ้า5สม ้า5สร ้า5สล ้ำ1 ้1เ ้1แ ๊ก5ซอ ๊ก5ริ ๊ก5ลุ ๊ก5ฮว ๊ป5ซี ๊ย5ก่ ๋ย5อิ ๋อ5ด๋ ์ค5สเ ์ค5แล ์1บ ์1พ ์1ร ์1เ ์1แ ์1โ .ก6 .ข6 .ฃ6 .ค6 .ฅ6 .ฆ6 .ง6 .จ6 .ฉ6 .ช6 .ซ6 .ฌ6 .ญ6 .ฎ6 .ฏ6 .ฐ6 .ฑ6 .ฒ6 .ณ6 .ด6 .ต6 .ถ6 .ท6 .ธ6 .น6 .บ6 .ป6 .ผ6 .ฝ6 .พ6 .ฟ6 .ภ6 .ม6 .ย6 .ร6 .ฤ6 .ล6 .ฦ6 .ว6 .ศ6 .ษ6 .ส6 .ห6 .ฬ6 .อ6 .ฮ6 6ก. 6ข. 6ฃ. 6ค. 6ฅ. 6ฆ. 6ง. 6จ. 6ฉ. 6ช. 6ซ. 6ฌ. 6ญ. 6ฎ. 6ฏ. 6ฐ. 6ฑ. 6ฒ. 6ณ. 6ด. 6ต. 6ถ. 6ท. 6ธ. 6น. 6บ. 6ป. 6ผ. 6ฝ. 6พ. 6ฟ. 6ภ. 6ม. 6ย. 6ร. 6ล. 6ว. 6ศ. 6ษ. 6ส. 6ห. 6ฬ. 6อ. 6ฮ. 6ก์. 6ข์. 6ฃ์. 6ค์. 6ฅ์. 6ฆ์. 6ง์. 6จ์. 6ฉ์. 6ช์. 6ซ์. 6ฌ์. 6ญ์. 6ฎ์. 6ฏ์. 6ฐ์. 6ฑ์. 6ฒ์. 6ณ์. 6ด์. 6ต์. 6ถ์. 6ท์. 6ธ์. 6น์. 6บ์. 6ป์. 6ผ์. 6ฝ์. 6พ์. 6ฟ์. 6ภ์. 6ม์. 6ย์. 6ร์. 6ล์. 6ว์. 6ศ์. 6ษ์. 6ส์. 6ห์. 6ฬ์. 6อ์. 6ฮ์. 6กิ์. 6ขิ์. 6ฃิ์. 6คิ์. 6ฅิ์. 6ฆิ์. 6งิ์. 6จิ์. 6ฉิ์. 6ชิ์. 6ซิ์. 6ฌิ์. 6ญิ์. 6ฎิ์. 6ฏิ์. 6ฐิ์. 6ฑิ์. 6ฒิ์. 6ณิ์. 6ดิ์. 6ติ์. 6ถิ์. 6ทิ์. 6ธิ์. 6นิ์. 6บิ์. 6ปิ์. 6ผิ์. 6ฝิ์. 6พิ์. 6ฟิ์. 6ภิ์. 6มิ์. 6ยิ์. 6ริ์. 6ลิ์. 6วิ์. 6ศิ์. 6ษิ์. 6สิ์. 6หิ์. 6ฬิ์. 6อิ์. 6ฮิ์. 6กุ์. 6ขุ์. 6ฃุ์. 6คุ์. 6ฅุ์. 6ฆุ์. 6งุ์. 6จุ์. 6ฉุ์. 6ชุ์. 6ซุ์. 6ฌุ์. 6ญุ์. 6ฎุ์. 6ฏุ์. 6ฐุ์. 6ฑุ์. 6ฒุ์. 6ณุ์. 6ดุ์. 6ตุ์. 6ถุ์. 6ทุ์. 6ธุ์. 6นุ์. 6บุ์. 6ปุ์. 6ผุ์. 6ฝุ์. 6พุ์. 6ฟุ์. 6ภุ์. 6มุ์. 6ยุ์. 6รุ์. 6ลุ์. 6วุ์. 6ศุ์. 6ษุ์. 6สุ์. 6หุ์. 6ฬุ์. 6อุ์. 6ฮุ์. 6ะ 6า 6ๅ 6ำ7 6ิ 6ี 6ึ 6ื 6ุ 6ู แ6 โ6 5ไ6 7ใ6 6็ 6่ 6้ 6๊ 6๋ 6์ 6ํ 6ฺ 6๎ เ6ข เ6ฃ เ6ค เ6ฅ เ6ฆ เ6ง เ6จ เ6ฉ เ6ช เ6ซ เ6ฌ เ6ญ เ6ฎ เ6ฏ เ6ฐ เ6ฑ เ6ฒ เ6ณ เ6ด เ6ต เ6ถ เ6ท เ6ธ เ6น เ6บ เ6ป 7เ6ผ เ6ฝ เ6พ เ6ฟ เ6ภ เ6ม เ6ย เ6ร เ6ล เ6ว เ6ศ เ6ษ เ6ส เ6ห เ6ฬ เ6อ เ6ฮ ช6วา. ช6ไ ธ6ไน ม6ไห ส6ไต เลส7ไต ส6ไน ส6ไบ ส6ไป ส6ไล บ6ทคว ม6วก ม6วน ม6วด ม7วดี ม6วย ะม6วง ล7ชน ัต5ถุ ัต6ถุ์ 6ตร. ธา6ตุ. บุ6ตร. ค6รู ฮิบ6รู ฮีบ6รู ส6ภา ส7ภาร เส7ภา โส7ภา ผ6วา น6คร. .เห6ยง เปี่6 เขี้6 ม6ณี คาม7ณี .รม7ณี .รัม7ณี หม7ณี ง6วด ง6วน วัง7วน ง6วย มง6วง อย6อด พ6ญา จุ6รณ ฤ6ชา .ฤ6ทัย พรร6ดิ สวา6ดิ อ6ริ. จน6ที. 
ธค6ยา นิม6นา ย์ม6นา า7ณะ ิ7ณะ ุ7ณะ ณ7ณะ ก7ณะ ท7ณะ ล7ณะ ุษ7ณะ รป7ณะ หม7ณะ สม7ณะ ลว7ณะ รว7ณะ ร5ณะ ณร6สี ก6นะ ยก7นะ ค7นะ ย7นะ ภว7นะ มท7นะ รต7นะ ลว7นะ วจ7นะ วท7นะ วส7นะ ศม7นะ ภช7นะ ไช7นะ าลป7นะ รรธ7นะ สธ5นะ โสธ6นะ สว5นะ เสว6นะ สาว7นะ ัจ7นะ ัช7นะ ัฏ7นะ ัฒ7นะ ัต7นะ ัท7นะ ัป7นะ ัส7นะ ุจ7นะ อาส7นะ ุ7นะ 5ผี 7จำ 5งำ ห6งำ น7รำ ย7รำ ร7รำ โค7รำ ไพ7รำ น7ยำ ม7ยำ 5งง. ห6งง น7งก 5ชน. เ6ชน โ6ชน 5กร. ั6กร า7นะ ถ7ระ า7ยก. า7ยน. า7ฐี า7นี า7วี ป5โ ป6โย ป6โภ วิป7โย อุป7โภ ศ7นะ รร7มะ ต5ถี ุต6ถี 5บท. ส6บท 5บถ. ข6บถ ส6บถ 7ฟู 7ษุ 5ตะ. ค6ตะ ร6ตะ สร7ตะ า7มี มิ7ผ า7กิ า7กล ิ7กล. ์7กล 5นำ ห6นำ รี7ผ 7ณุ 5นี. ห6นี ฉ6นี าร6นี วีช6นี สส6นี มท6นี รม6นี น7ยิ ิ5ลี ุ5ลี า7ลี โค7ลี โม7ลี ท7ลี ร7ลี ก7ยะ ค7ยะ ป7ยะ ท7ยะ ธ7ยะ น7ยะ ษ7ยะ า7ยะ ิ7ยะ คี7ยะ ฆี7ยะ ณี7ยะ นี7ยะ รี5ยะ เปรี6ยะ มโห5 ิ7รี ี7รี ู7รี หา7รี ม7รี. น5รี. เต7รี. ช7รี. ถ7รี ภ7รี ภม7รี โม7รี ภุม7ร พ7รี. เว7รี 5ผล 5ดล. 5รส. ก6รส จ6รส โค6รส ท6รส พ6รส ด6รส 5คน. ณ7หา ฤๅ5 ฤา5 .ยี่7 า7วะ เท7พี เท7วี บรร7จ บรร7ถ บรร7พต 5ทก. 5ดร. น7ทร. า7ทร. โค7ทร. โล7ทร. โส7ทร. 7อู. 5พล. ไพร่7 5ศก. อัฐ5 อัฐ6ม อัฐ7มี ี7วี ู7วี ถ7วี. ส7วี. ฏ7วี. น7ตี ร7ตี อ7ตี า7ตี ู7ตี า7สี ณ7สี ห7สี เว7สี ู7สี ิ7สี ก7สี โบ7ลา ู7ลา อจ7ลา เว7ลา บิว7ลา มข7ลา เอ7ลา ี7ลา โร7ลา โอ7ลา โซ7ลา ิ7กะ ุ7กะ อ7กะ นว7กะ ิณ7กะ เภ7กะ ัย7กะ ิย7กะ รธ7กะ ัฏ7กะ ัฒ7กะ ิช7กะ ศต7กะ มล7กะ 7ทุ. โซ6ร ธ6นู ัส7ดุ. ร7คต ดง7คต 5กง. เ6กง 7ฎก ณ7มี ว7มี ศ7มี ู7มี ี7ติ รุ7ติ สุ7ติ ฮ7ติ อร7ติ วีส7ติ ติงส7ติ คุป7ติ มุต6ติ ภัต6ติ ก7ดี ต7ดี พ7ดี ม7ดี ย7ดี ศ7ดี อ5ดี า7ดี ี7ดี ุ7ดี ุว7ดี ดิบ7ดี นัก7 กุณ5 กุณ6ฑ์ 7ซี. 5ที. จน6ที ี7รา ู7รา ์7รา ิต7รา ม7รา ย7รา .มก7รา รบ7รา ลิก7รา เห7รา. 7กฎ. 7กฏ. 5หะ ค6หะ นิค7หะ เค7หะ ท6หะ เท7หะ ู7หา ฬ7หา ค7หา เน7หา ่7หา 5มะ ร6มะ ห6มะ ต6มะ 5หู 5ดำ ส6ดำ 7คำ 5สะ ว6สะ 5ฐะ ส6ฐะ 7ธะ 5พี. ร6พี ทร7พี ปฐ7วี ิ7ดา ษ7บ ษ7ป ิ7ระ ี7ระ ู7ระ ช5ระ ิต7ระ ทห7ระ ท7ระ. ุก5ระ. สว7ระ ัส7ระ ิส7ระ เป7ระ อ7ยา. เก7ยา รร7ยา สา7วก ิ7ธิ ุท7ธิ. ิท5ธิ. .สิท6ธิ. บุริมสิท6ธิ. ไกรสิท6ธิ. ป7ธิ ขัดสมา6ธิ พยา6ธิ. 5ษี. ด6นู ิ7วะ ี7วะ ุ7วะ ี7วก ย7วะ เท7วะ ไท7วะ ัท7วะ าช7วะ ไศ7วะ 7ถะ 7ษะ 5พร. 5ผง 5ธี า7ชะ ิ7ชะ ร5ชะ ส7ชะ โอ7ชะ 5ฆะ 5ฟะ า7ฟี ิ7ถี ร7ถี 5ฮา 5ญี 5ผา 5หิ. สิน7ธพ สิน7ธุ. สิน7ธู 5ชู 5ศะ ิ7ละ ุ7ละ ู7ละ ย7ละ ด7ละ .วส7ละ อเจ7ล เต7ละ ่7ละ น7ทะ ท7ทะ ส7ทะ น7ตุ. รร6ตุ มา7ตฤ ิ7รพ า7รพ. ไก7รพ 5ศุ. า7ถา า7สพ พ7สพ ุ7ขี 7สอ. า7ดะ 5บะ. 5ยี. ห6ยี 5กี. 5หก. ง7อร. ม7อร. ี7วร ส7วร. พู7นท 5จร. โ6จร. 7ศพ. โป7ลี 7ภพ. 7นพ. 7ณพ. า7รก. ทก7รก ย7รก. ยว7รก. 5มล. ุ5บล. โล7บล. 5ชล. 5ชก. 7โพ 5ณู 7ปี. า7บี. 5ฏะ. า7ฬี 5ปะ. ฉ6ปะ ส6ปะ ู7ลู 5ตู. 5ยู. ิ7ชิ 7ฆี. ิ7จี ี7จี ุ7จี ู7จี เว7จี 5ศี. 5มน. 5ยอ. ผ6ยอ. 5สง. 7สร. 5ดก. ส6ดก 7โก. ก7ฝ า7มก. 5ซอ า7ขะ ู7ขะ ส5ขะ ร7ษา 5ภะ ศ7ภ ิ7ลก ุ7ฎี ศา5ข 5สา. ั6สา 7ซู 5ษก. ษ7ฐี 5ดม. ส6ดม ด7ลม. ส7ลม. ว7ลม. ี7ลม. 5ศล. นิ7ยต 7งู 5จะ. า7สก. โป7สก 5ยศ. 5ธก. 5กบ. 7คู. ส5มา. 5แล. 5พก. โส7ภ รร6ดิ. า7วก. น7นร. 5จอ. 5จบ. 5คบ. 5ฉล. ม7รม อบ7รม ิ7รม. ี7รม. 5ซน. 5ดอ. 5กิ. ซู7ซุ ซู7ฮก 5บส. น7รน. ตก7ลง ม7ตน ตัว7ตน ี7วง ศ7วง. แตร7วง แวด7วง า7ฑู 5หด. อบ7นบ นา7คร. ี7ฑา ู7ดู า7รภ. า7ฝ ล7รบ. ว7รบ. อ7รบ. า7รณ. น7ยง ม7ยง ุ7ยง ิ7ยง ิ7ยน หา7พน า7งิ ช7รถ. น7รถ. ส7รถ. ัน7ธร. มณ7ฑก มณ7โฑ มร7กต มร7ฑป ยอด7อก โล่ง7อก ยืด7อก ห7ห 5ทด. ว7นม. ทพ7นม. โค7นม ษ7ฎร. ิ7ปุ ิ7ปู ี7รอ. ย7ลำ อ7ลำ ้7ลำ น7ทม. ป7ทม. วก7วน อล7วน ิ7จล. ช7ญะ ี7ข ศีล7 5ธม. สม7รด สัก7วา สัป7ด สัป7ท า7สม. อ7สม. า7นล. ี7รุ ู7รุ เน7รุ ง7หล สีห7นุ 5ภร. 5จด. บ7ยก. ดิ7ศร ร7ศร อพ7ยพ ร7ชร. รส7กา ลส7กา อาจ7อง ี7มู อึง7อล ุ7ชุ ุ7สภ. เก7ชา เก7ศา ช7ตก. บ7ตก. เข7ฬะ ห7ณี อ7ปน. ย7ชม. เบื้อง7 5คะ ง7ออ. อ7ออ. เรือ7ธ เรือ7บ เลี้ยว7 5กก. เ6กก อ7ขอ. า7กอ. แด7วู บ7ยล. โฉ7เก โด7มร โต7มร 7โผ โท7โส ้7ปด. 7คี. โย7นก. โส7มม 7ฬส. ต7ถิ 7โฮ ใจ7 5ฟง ไช7โย 5พต. กรร7กศ ล7บก. ศ7ยป. า7นน. ุ7ฎา ู7ฏา า7มอ. ท7โท ุ7ทส จ่า7ร ฬ7หี า7ฒะ ธต7รฐ ท7คล. ต7ถร. 
ิ7ฐิ ป7ผะ พฤ7ษภ. ิ7ธุ า7ฬก. ห7สิ ฏ7ฏิ. ษ7ฏิ. ศิษ7ฎิ ษ7ฏี 5ษส. ิ7ปิ ู7ริ. ฑ7ฑุ ษ7ฏุ า7ตา ว7ตก ง7ตก เก6ตุ. ส7ตุ ลิ7บง ฮ7โ",
- ["length"]=53112,
+ ["data"]=".ชี5วั .ทัศนู5 .ที่3 .บท1 .รง4 .ราย3 .ลำ3 .สน5ท .สู3ต .ใบ3 2ก1ก ก4กม กก4ส 2ก1ข ก4ขค กข5คณ ก4ขช กข5ชา ก4ขณ ก5ขณะ ก5ขณา ก4ขบ กข5บุ ก4ขภ กข5ภั ก4ขม ก5ขมั กข5มา กข5มู กข5ลา ก4ขเ กข5เท กข5เว ก4ข์ ก1ค กง5บว ก1จ ก1ช 2กซ ก3ซิ กญ5จน กฎ5หม กฎ5เก กฏ5หม ก5ดิน ก1ต ก4ตด กต5ดิ ก4ตส ก4ตเ ก1ท ก1น ก4นด ก4นธ ก1บ ก1ป กป4ร ก1พ ก1ฟ ก1ม ก4มม กม5ลา ก4มส ก4มเ กย5มุ ก3ย้ กร5กฎ ก5ร5ณั กร5ต๋ 1ก4รร กร5รา กร5ลา ก5ราค ก4รู กร5ไฟ กล5นค กล5บิ กล5มห ก2ว ก5วัต ก5ษณน ก3ษณะ ก5ษณา ก5ษมา ก5ษมี กษ5เท ก1ส กส4น ก4สโ ก1ห 3กอน กอ5อิ กะ5ถั กะ5ผล 4กะร 1กั 1กา กา5กะ กา5ดู กา5นี กา5น้ กา5บอ กา5ฝา กา5ร่ กำ5ด้ กำ5ทอ กำ5ผล กิ5กะ 1กิจ กิ4ต กิ5นี 3กิริ กี5รณ กี5รต กี5สถ 1กุ กุ5งอ กุ5ชิ กุ5ฎุ กุ5มุ กุ5รร กุ5ลี กุ5แห 1กู กู5ปร กู5รข กู5รม กู5ลิ ก1เ ก1แ ก1โ ก1ไ ก่5กอ ก่5บ้ ก่5ป่ ก์5ท็ ข2 ขม5หิ 4ขลา ขอ5ขม ขอ5ง้ ขอ5อภ 1ขั 1ขา ขา5ก๊ ขา5ทน ขิ5ปส ขิ5ไณ ขี้1 ข่5มุ ข่5หง ข้าว3 ค1ค คช5สี คช5เช คช5เม ค4ณิ ค4ทร คท5รี คท5วอ คน5ยอ 4คนิ คป5ซู คป5ผก 3คมน คม5ฟร คม5ลอ 2คย คร5ซอ คร5นอ คร5นี คร5พน คร5ฟิ คร5มเ คร5ร้ คร5ลิ คร5หา 4ค5รัก คฤ5หบ คฤ5หา คฤ5โฆ คล5คู ค2ว คว5ทอ 3ควา 2คส คส5ติ คห5กร คห5นิ คห5บด คห5สถ 3คอน 3คัน 1คา คา5ปู คา5พจ คา5พย คา5รว คา5วจ คำ5ดี คำ5โอ คำ5ไก คี5รี 1คุ คุ5ณู คุ5ลี 4คุ์ คู5ปอ คู5ลอ 2ค1เ ค1โ 2ค์ ค์5จำ 1ฆา ฆา5ณั ฆี5ยก ง1ก ง4กห งกะ4ร ง4กเ ง4ก์ ง1ข ง4ขก ง4ขต ง1ค ง4คจ ง4คช ง4คญ ง4คธ ง4คบ ง4คป งค5วั ง4คศ ง4คโ งฆ5ปร งฆ5สภ งฆ5เถ งฆ5เภ ง1ง ง4งเ ง1จ ง1ฉ ง1ช ง4ชี ง1ซ ง1ด ง1ต ง1ท ง1น งบ5ดุ ง1ป ง1ผ ง1พ ง1ม ง1ย ง1ร ง1ล ง1ว ง4วเ ง1ส งส5กล งส5กุ ง4สบ ง4สพ งส5พย ง4สภ ง1ห งห5นา ง4หบ งห5บั งห5รา 1งา งา5ช้ งา5รำ งู5สว ง1เ ง1แ ง1โ ง1ไ ง่5งอ จ1จ จ4จว จ1ฉ จด5จ่ จต5จำ จต5มู จป4ก จมบ5พ 3จริ จอ5งอ 1จั 1จา จา5มร จา5มี จา5รึ จำ5ทว จำ5อว 1จิ จิ5จู จิ5ตอ จี5ดี จุ5ฑา จุ5สม จ1เ ฉ2 ฉก5ฉว ฉก4ษ ฉท5ทิ ฉร5ฉิ 1ฉั 1ฉา ฉา5ก๊ ฉา5พย ช1ช ช1ฌ ช4ฌก ช4ฌฆ ช5นีก 4ชน์ ชฟ5รอ ชฟ5โร ชร5กล ชร5ริ ชร5ฤก ชร5หล ชร5หึ ชร5อุ ชว4โ ชอง4 1ชั 1ชา ชา2ต ชา5ตร ชา5ปี ชา5มต ชา5ยต ชา5สง ชำ5งั 3ชิต ชิ5นี ชิ5รณ ชิ5แก ชี5ผะ ชี5ผ้ ชี5ฟอ ชี5รณ 3ชีว ชี5วน ชุ5ติ ชุ5ลด ชู5ปก ชู5ปถ ชู5ปโ ช่5อิ ช้5สอ ช้5ได ซน5ทร ซ5ราม ซล5มอ 1ซั 1ซา ซา5ชู ซา5มู ซิ5ตร ซิ5ฟิ ซิ5แล ซี5ดี ซี5นี ซี5รา ซี5ริ ซี5รี ซี5ร็ ซี5ลี ซู5ซู ซู5บิ ซู5ริ ซู5ลิ ซ1เ ซ1โ ซ่5ง่ ซ่5ซ้ 1ซ่า ญจ5ดุ ญ4จน ญ5จ5นท ญ5จ5นบ ญ5จนา ญจ5บร ญ5จ5มบ ญจ5รง ญจ5วี ญจ5ศี ญ4ฉน ญ1ช ญ1ญ ญประ4 1ญา ญา4ต ญ่5บ้ ฏ1ฐ ฏ4ฐบ ฏิ5ทิ ฏิ5ปท ฏิ5ปุ ฏิ5สน ฏิ5สว ฐ4ภั ฐม5ฌา ฐม5พย ฐม5ฤก 1ฐา ฐา5นี ฐุ5ชุ ฑา5มณ ฑา5สถ 3ฑูร ฒิ5สภ ฒิ5สม ณ1ฑ ณ4ฑก ณ4ฑฆ ณ4ฑน ณ5ฑนะ ณ4ฑบ ณ4ฑม ณฑ5ลา ณ4ฑส ณ5ฑสก ณฑ5สถ ณ5ฑ5สี ณฑ5โล ณ4ฑ์ ณย5รั ณ1ร ณ4วา ณสม4 ณห5พล ณห5ภู 1ณา ณา5ปี 1ณิ 1ณี ณี5สง ณู5ปโ ด1ก ด4กง ด4กด ดก5ดื ด4กเ ด4กแ ด1ข ด1ค ดง4ค ดง5ออ ด1ช ด4ชน ด5ชนะ ด1ด ด4ดเ ด1ต ด1ท ด1ป ด1พ ดร5ลิ ด3ร้ ด1ส ด4สก ด1ห 1ดั ดัส5ต 1ดา ดา5กอ ดา5มุ ดา5รก ดา5สว ดำ5ฤษ ดิ5ทอ ดิ5ทิ ดิ4บ ดิ5วร ดิ5ศว ดี5ดี 3ดีน ดี5ฝ่ ดี5รอ ดี5ลิ ดี5วี ดี5หม ดี5หว ดู5ถู ดู5ปอ ดู5รั ดู5หม ดู5แค ด1เ ด1แ ด1โ ด้5ยิ 2ด์ ด์5สป 2ตก ตก5ร้ ต1ค 2ต1ช 2ต1ต ต4ตภ ต4ตส ต4ตโ ต5ถกะ ต3ถา ต5ถุป ต5ถุศ ตถ5เล ตทัศนูป5 2ตน ตน5ฟอ ตน5วร ต4นาธ 2ต1บ ต4บช ตบ5ชว ตป5นี ต1ภ 2ตย 4ตรก ตร5กิ ตร5งอ ตร5จี ตร5จุ 4ตรฐ ตร5ตร ตร5ทว ตร5ผล ตร5ฝร ตร5พล ตร5รง ตร5ลด ต5ริยา ต4รู 2ตร์ ตฤ5ตี ตล5รั ต1ส ต4สค ตส5วา ตส4เ ต4สแ ตส5แต ตอ5ม่ ตอ5รอ ตะ5ใภ 1ตั 1ตา ตา5กล ตา5กว ตา5นึ ตา5ปร ตา5ปล ตา5ผิ ตา5ฟู ตา3มห ตา5มอ ตา5มะ ตา5ฬี 3ติก. ติ5จู ติ5ช่ ติ5ทิ ติ5นร ติ5บอ ติ5มศ ติ5มส ติ5มอ ติ5ยม 4ติ์ ตี5ขล ตี5คู ตี5ตื ตี5รว ตี5ลั 3ตี้. 
ตุ5ตถ ตุ5ทส ตุ5ป่ ตุ5มห ตุ5รก ตุ5ลั ตุ5สด ตู5ดิ ต1เ ต3แล ต1โ ต่5ถา ต่5ว่ ต่5สว ต้5ก๋ ต้5ตอ ต้5ฝุ ต๋5เต ต์5คล ต์5ฟู ต์5ศต ถด5ถอ ถม5ถื ถล5ไถ ถว5ไม ถะ5ถั ถ4าธ ถา5วร ถ4ีย ถี5ลิ 3ถุน ถ่5ถอ ถ่5ถา 4ทกา ทค5ติ ทค5นี ทด5รอ ทด5ลอ ทธ5คย ท5ธชะ ทธ5ฎี ทธ5ปฏ ทธ5พร ทธ5รั ทธ5ศต ทธ5สี ทธ5อง ท5ธิก ท5ธิช ท5ธิบ ท5ธิป ท5ธิผ ท5ธิพ ท5ธิภ ท5ธิร ท5ธิฤ ท5ธิศ ท5ธิโ ทธ5เจ ทพ5ธิ ทพ5ยุ ทฟ5ลอ 2ทย ท5ยาน ทร5คต ทร5คร ทร5ธน 3ทรร ทร5สโ ทร5หว ทร5หึ 1ทรา ท5ราก 4ท5ราห 1ทรี ทว5ทห ทว5สถ ทศ5ทิ ทศ5วร ทสน5ท ทส5โก ทห5วั ทห5ฬิ 1ทั 1ทา ทา5ฐิ ทา5ฒิ ทา5นอ ทา5มร ทา5รพ ทำ5ขว ทำ5ซ้ ทำ5ท่ ทำ5โท ทิ5ฆั ทิ5ฐิ ทิ4พ ทิ5พา ทิ5วง ที5นว ที5นี ที5รา ทุ5คต ทุ5ลั ทุ5ศี 1ทู ทู5น่ ท1เ ท์5ดอ 1ธร 4ธรส 4ธรั 1ธา ธา1รณ ธิ5ฤท ธิ5ศี ธิ5สม ธี5รี ธุ5ดง ธุ5ลี ธู5ปน น1ก น4กค น4กป นก5ยู นก5รู น1ข นข5ลิ น1ค นค5ริ น1จ น4จอ นจ5อน น1ช น4ชญ น1ซ น1ด น4ดร น1ต นต5กว น5ตกะ นต5ดิ น4ตท นต5ทิ นต5ปิ น4ตภ น5ตระ น5ตรั น3ตรา น5ต5ริ นต5ฤด น3ติ น5ตุก น5ตุฏ น4ต์ นถ5ธุ นถ5รจ นท5ขี นท5นน น5ทนะ นท5ผล นท4ย น5ทรง น5ทรล น5ทรั น5ทรุ นท5ฤก น5ทลา น5ทวย น3ทอ น1ทิ น3ที นธ5กร น5ธกะ น5ธนะ น5ธุก น5ธุร น5ธุว น5ธุศ นธ5ไม น1น น4นต น4นท น4นร นน5รา น1บ นบ5นอ น1ป น4ปจ นป5จู น4ปท น1พ นพ5ปฎ นพ5ศู นภ5ศู น5ยนต นฤ5คห นฤ5ปเ นฤ5เท นฤ5เบ น1ล น4ลล นว5ร่ น1ศ นษ5กร น1ส น4สซ น4สส นส5แด น4สโ น1ห นอ5กะ 3นอน 1นั 1นา นา4คร นา5ณั นา5ปร นา5รย นา5วต นา5วล นา5สณ นา5สน นา5สว นา5ฬิ 4นาะ 1นิ นิ5ด้ นิ5ฟอ นิ5มน นิ5ยม นิ5ยา นิ5รอ นิ5ลุ นิ5วร นิ5สง นิ5สถ นิ5สี นิ5แด นี5มี นี5มู 1นุ นุ5พย 1นู 2น1เ น1แ น1โ น1ไ น่5อี 3น้อ 1น้ำ น์5สไ บ1ก บ4กษ บกิส5 บ4กแ บ1ข บ1ค บ4คท บค5ที บ4คโ 1บดี บ1ท บบ5ฉบ บบ5ฝึ บบ5อย บ1ป บ1พ บร5มี บ1ส บ4สบ บส4เ บ1ห บอ5ดี บอ5ระ 3บอล 1บั บัพพาชนี5 1บา บาจ5ร บา4ต บา5ตอ บา2ท บา5ทา บา5ทุ บา5รน บา5รอ บา5สม บิ5ชอ บี5คิ บี5ร่ 1บุ บุค3 บุ5ตร บุ5ถุ บุ5รพ 1บู บู4ช5น บู5ติ บ1เ บ1แ บ1โ บ๊5จี บ๊5เบ ปก4ส ป4จั ป4จา ปฐ5ปท ปฐ5พี ปต5ถก ปต5พล ป4ทา ป1ป ป4ปเ ปม5ด้ ป4ยุ ปร5ตอ ปร5ตี ปร5ตุ ปร5ผั ปร5ษณ 1ประ ปร5แก ปร5แท ปล5ญว ป4วา ปส4ต 1ปั 1ปา ปา5ฐก ปา5ณก ปา5นี ปา5ปิ ปาร4 ปา5รเ ปิ5ดอ ปิ5ยภ ปิ5ยอ ปิ5หก ปี5ชี ปี5มะ ปี5ฬก ปี่3 ปุ5คล ปุ5ถุ ปู5จ๋ ปู5ติ ป1เ ป1ไ ผก5ผั ผณิ5ศ ผน5ผั ผ4นิ ผ4ยา ผล5พล ผล5ไม ผ4สา ผี5ดิ ผี5ตอ ผี5ถ้ ผี5ห่ ผ้า3 3ฝอย ฝี5มะ ฝ่5ฝั 3พจน พจ5นี พช5ฉล พช5รา พท5ริ พทัก4 พน5ทะ พ4นั พนิ4 พ1พ 2พ2ย พย5ก๊ พร5ชย พร5ซี พร5มี 1พรร พ4รู 3พฤก พฤ5ฒา พล5ทิ พล5ร่ พส5เฟ พอ5คว พอ5สม 1พั 1พา 4พาจ พา5ชน พา5นร 1พิ พิ5ถั พิ5ถี พิ5ปล พิ5รอ พิ5รี พิ5ลึ พิ5ศุ พิส5ม พี5ระ พุ5ชิ พุ5พอ พู5ทว พู5พอ พ1เ พ4เย พ่5ป๊ พ่อ3 พ้5ท้ 2พ์ พ์5ดี 1ฟั 1ฟา ฟิ4ลา ฟี5ฟ่ ฟู5ฟ่ ฟ1เ 1ฟ้ ภค5ทร ภค3ว ภช5นี 1ภั 1ภา ภา5ณก ภา5ณว ภา5รด ภา5รต ภา5รย ภา5วน ภิ5ชน ภิ5มห ภิ3ร ภิ5สม ภิ5ไธ ภุ5ชง 1ภู ภู5ฏา ภู5ริ ม1ก ม4กม ม4กษ ม1ข ม4ขล ม1ค ม4คค ม4คอ มค5อิ 1มงคล มง5ฟอ ม1จ ม1ช มช4ว ม1ซ 3มณฑ มณ5ฑน มณ5บร มณ5พร มณ5เฑ มด5ยอ มด5ลู ม1ต ม4ตธ ม4ติ ม4ตไ มต5ไต ม1ท 3มนตร มน5ฮั ม4นุ ม1บ มบ4พ ม1ป มป4ช มป5ฤด มป5ฤๅ ม4ป์ ม1พ ม4พก ม4พว ม1ภ มภ5กถ ม1ม ม4มเ ม4มโ มย5รา 3มรร ม3รั ม3ริ มฤ5คิ มฤ5เค มล5ทิ ม3ลิ ม3ล้ ม1ว มว5มอ ม4วล ม1ส มส4เ มห5กร ม3หน มห5ภา ม5หาญ ม5หาย มหา3ส มอ5ขว มอ5คร มอ5ดู มอ5ตำ มอ5นว มอ5นอ มอ5ระ 4มอั มะ5ถั มะ5ฝ่ มะ5ฮอ 1มั ม4ั่ 1มา มา4ก มา5ดร มา5นร มา5ป่ มา5พจ มา5มก มา5มุ มา5ม่ มา5ยณ มา5ยอ มา5ร่ มา3ว4 1มิ มิ5กภ มิ5ชิ มิ5ซร มิ5ตล มิ5ถิ มิ5น่ มิ5ฟล มิ5ลำ มิ5แพ มี5ขม 3มืด 1มือ. 
มุ5ทะ มุ5ทั มุ5ทิ มุ5ทุ มุ5ฮั มู5ซี มู5ป่ มู5รต มู5ลิ มู5หย มู5หร มู5ฮั มู5แด มู5แผ มู5แฮ ม1เ ม1แ ม1โ ม1ไ ม4่า 3ม้า ม์5ภิ ยก5ย่ ย1กร ย4ก5ร้ ย1ค ยง5บ่ ยง5ฝ้ ยง5อย ยจ5คร ยด5ย้ ย1ต ย1ท ย1ธ ยบ5ร้ ย1ป ย1พ ย1ภ ยม5ยอ ยม5รา ยม5หา ยม5อี ย4มิ ย1ย ยย4ส ยร5ถี ย5รบั ยล5ไท ยว5ข้ ยว5จ๊ ยว5ดอ ยว5นี ยว5ย่ ยว5รั ยว5ไส ย1ศ ย1ส ย1ห ย4หฐ ยห5ฐา ย4หป ยห5ปร ยอ5บี ยอ5รม 1ยั 1ยา ยา5กฤ ยา5กว ยา5ฉุ ยา5ณม ยา5ณว ยา5ถ่ ยา5บร ยา5สล ยา5สี ยา5ฬั ยำ5ทว ยี5รา 1ยุ ยุ5คล ยุ5ตก 4ยุภ ยุ5แย ยุ5แห ยู5ถิ ยู5ฟ่ ยู5ริ ยู5ไน ย1เ ย1แ ย1โ ย์5กล ย์5ถ่ ย์5มน ย์5หน 2รก รก5ซอ รก5ซึ รก5ซ้ ร1กร รก5รา รก5ร้ รค5พว รง5พย รง5รอ รจ5ถร รณ5คด รณ5ตร รณ5ถั รณ5พฤ รณ5สถ ร5ณาญ รณู5ป 4รณ์ ร1ด ร4ดป ร4ดแ ร4ดโ ร4ดไ รด5ไอ รถ1 รถ5พย ร1ท ร4ทฤ รท5ฤด ร4ท4ว รท5วิ รธ5ขึ รธ5สร รธ5เก รน5ทุ 4รนา ร1บ ร4บค ร4บถ รบ5ถ้ ร4บม ร4บั ร4บไ รบ5ไก ร1ป ร4ปณ ร5พชา ร5พชิ รพ5ทิ ร1ภ ร4ภย รม5รอ รมาว5 รม4เห ร4ยั รร4ก รร5คา รร5จถ รร5จว รร5ชิ รร5ณึ รร5ถา รร5ยง รร5ยเ รร3ล รร5หา รร5แท รร5แส รร5ไก รร5ไต รล5ออ รศ5นี รษ5ฐิ รษ5ตร ร1ส ร4สก ร4สช ร4สเ ร4สโ ร3หิ ระ1 ระ5สา ระ5หก 5รังส 3รัฐ 1รัต รา5กฏ รา5กฤ รา5กว 1ราช รา5ชู รา5ดร รา5ดว รา5ดู รา5ม่ รา5วณ รา5สง รา2ห รา5หุ รำ5งั รำ5จว ริ5ซึ ริ5ตร ริ5ทึ 4ริพ ริ5มน 4ริยจ 4ริยย 4ริร ริ5แล 4ริ่ รี5คู รี5ฑา รี5ดู รี5ตร รี5ตอ รี5มู รี5รั รี5รา รี5ริ รี5ลั รี5ลิ รี5ล่ รี5สอ รี5สะ รุ5กว รุ5ขร รุ5คร รุ5ทว รุ5ธิ รุ5มุ รุ5วน 1รู รู5ที รู5นี รู5บิ รูป5ก รู5ปิ รู5มา รู5มู รู5หร 2ร1เ ร1โ ร่5กะ ร่5ตร ร่5ร่ ร่5หล ร์5กิ ร์5กี ร์5ดิ ร์5ติ ร์5ตู ร์5ทิ ร์5ฟอ ร์5ฟู ร์5ลี ร์5วอ ฤ4ดา ฤป4เ ฤษ5ฎี ฤห5บด ล5กนะ ลก5ลา ลก5วั ล3กอ ล4กัย ลข5คณ ลข5หม ลชี4 ลด5ระ ลด5ลิ ล4ดา ล1ต ล4ตฟ ลต5ฟอ ลบ5ตะ ลบ5มุ ลบ5ล้ ลบ5ไส ลป5ตอ ลม5งว 2ลย ล1ล ล4ล์ ล3วี ลว5ไห ลส5ไต ลห5กุ ลอก5ล ลอ5จี ลอ5สร ละ5ผล 1ลักษ ลา5กล ลา5นี ลา5บร ลา5ป๋ ลา5พอ ลา5มี 3ลาร ลา5รอ ลา5ร้ ลา5ฤก ลา5ส้ ลิ5ก่ ลิ5จู ลิ5ซิ ลิ5ซึ ลิ5ตอ ลิ5นอ ลิ5น่ ลิ4บ ลิ5บา ลิ5ฟอ ลิ5มู ลิ5ไท ลิ5ไล ลี5ตะ 3ลีน ลี5ผล ลี5ลา ลี5วู ลุก5ร ลุก5ล ลุ5ล่ ลูก1 ลู5ที ลู5มิ ลู5ลอ ลู5ออ 2ล1เ 2ล1แ ล1โ ล่5ติ ล่5ที ล่5หล ล่5ออ ล้5โพ ล์5สต ว3กร วก5ว่ ว5การ ว1ค 1วงศ วจ5ตร วจ5สอ วช5นี วด5ถ่ วด5มว วด5ยิ วด5ระ วด5ลา วด5ล้ วด5อ้ ว1ต ว4ตฉ วน5ถี วน5ท้ วน5ผส วน5รว วน5ร่ วน5อิ วน5อุ วบ5ยอ วบ5รว วบ5รั วบ5ฮา ว1ป ว1พ วม5รอ ว3มู วย5กิ วย5ก้ วย5จี วย5ริ วย5รื วย5ล้ วย5ไท วย5ไม วร5ธิ วร5มณ วร5มห ว4รย 1วรร4 ว4ร์ วล5ระ ว1ห วอ5ชิ 1วั วัน3 วันต5 วันท4 1วา วา5ดะ วา4ต วา5ตก วา5ติ วา5นร วา5นึ วา5บร วา5มน วา5รณ วา5สนะ วา4ห วา5หน วา5หิ 1วิ วิ5กล วิ5กส วิ5คห วิ5จุ วิ5ดี วิ5ตก วิ5ตร วิ5ตี วิ5ถี 3วิท วิ5ทิ วิ5ธุ วิ5ธู วิ5ปก วิ5ปฏ วิ5ปล วิ5ปว วิภู5 วิ5มล วิ5รง วิ5วร วิ5ศร วิ5ศุ วิ5ษุ วิ5สร วิ5สฤ วิเล5 วิ5ไล วี5คู วี5ชน วี5ดิ 1วุ ว1เ ว1แ ว1โ ว่5ห้ ว้5ชื ว้5ทุ ว้5ลา ว์5ลิ ศ1จ ศน5อุ ศพิ4 3ศรี ศ2วร ศษ5ซ้ ศษ5เก ศษ5เห 1ศั ศัก5ร 1ศา2 ศา5กา ศา5ขบ ศา5นุ ศา5ภิ ศา5รย ศา5รั ศา5ริ ศา5ลา 1ศิ ศิ5รพ ศิ5รว ศิ5ศี 1ศึ ศุ5กล ศู5ลิ ศเจ5ร ษ3ฎา ษฐ5ภค ษ5มณี ษ4มา 1ษั 1ษา ษา5คเ 1ษิ ษ์5พย สก5ลิ สก5ลึ สก5วั สก5วา ส4กา 4ส4กุ สข5บุ สง5ขล ส1ซ ส5ดิก ส5ดิน ส5ดิภ ส5ดิม สต5ทิ ส3ตรา 2สต์ สถ5วี 4สถ์ สน5ธย สน5ธิ ส5นียะ ส4นุ สนูป5 ส4ปา สพ5ติ ส2ม สม5ดุ 3สมบ สม5ผส สม5ผุ สม5ผเ สม5ยอ สม5ฤด สม5ฤต สม5หว ส5มัท ส5มัน สมุ4 สรร5ช สร5ลอ สล5บร สว4ก สว5ยม ส4วร สว5ริ ส4วา 4สวิ ส1ส สห5กร สห5กา สห5ชา สห5ธร สห5ปร สห5พั สห5ภา สห5รา สห5ศึ สอ5พล สอ5พอ สะ5ใภ 1สั สัญประ5 สัน3ถ สัม3 1สา สา5กิ สา5คเ 4สาธ สา5นึ สา5มน สา5มี สา5วพ สำ5ออ สำ5โร 1สิ สิ5ถิ สี5ข้ สี5ชอ สี5ดว สี5ตล สี5ตโ สี5ถ่ สี5ผึ สี5ฝุ สี5ละ สี5ลั สี5วล 1สุ สุ5กร สุ5กำ สุ5กี สุ5ขิ สุ5ขุ สุ5คต สุ5คร สุ5นี สุ5บร สุ5บิ สุ5ปร สุ5มน สุ5สง สุ5ไห 2สุ์ 1สู ส1เ ส4เฟ ส1โ ส4โก ส4โค 3ส่ว ส่5ไค ส้5กร ส้5ติ ส้5ไก 2ส์ ส์5หย ห2 2ห1ก หก5ระ หก5ล้ 5หการ หง4ส หง5สา หฤ5หร หฤ5โห หล5สะ หอ5คอ หอ5สม 1หั หา5กฐ หา5บพ หา5ปณ หา5พร หา5รื หา5ฤก หิ5รก หิ5ศว หุ5คู หู5กร หู5กว หู5หน ห้5ท่ ห้5ท้ ห้5ร้ 2ห์ ห์5กร ห์5สน ฬว5รา ฬห5บู 1ฬา ฬา5มณ 
ฬา5รึ อก5ซอ อก5ถล อก5รณ อก5รี อก5รู อก5ร่ อก5ฤท อก5ลว อก5ลอ อก5ลา อก5ล่ อก5ว่ อก5ใบ อค5ที อฆ5สง อง4คม อง5ถิ อง5บร อง5บิ อง5ฟอ อง5ฟุ อง5ระ อง5อุ อง5อ้ อด5ถอ อด5น่ อด5ฝา อด5ยอ อด5รั อด5อย อด5ออ อด5อุ อด5อ้ อ3ดิ อต5ดอ อต5ด็ อต5ไว อ1ท อ4ทค อท5คอ อน5ง้ อน5ดร อน5ทำ อน5ผั อน5ฝู อน5ฟิ อน5ย้ อน5รา อ4นา อ4นุ1 อบ5ช้ อบ5ถา อบ5บี อบ3อ อบ5ไล อป5กิ อป5ติ อป5พร อป5พล อป4ร อป5วา อป5โล อพ5ริ อฟ5ฟิ อฟ5ฟี อฟ5ริ อฟ5ไล อ4ภั อม5ฎอ อม5ดอ อม5ถอ อม5ยิ อม5รา อม5ร่ อม5ฤต อม5หล อม5หว อม5ห้ อ5มอน อย5กอ อย5ก๋ อย5นว อย5ร่ อย5ร้ อย5อิ อ4ยา อย5ได อร5ชุ อร5มน อ3รั อ3รา อ1ริ อ1รี อ3ร้ อร์1 อล5จี อล5นี อล5ฟ่ อล5มอ อล5หม อ3ลั อ1ลิ อว5รุ อศ5กร อษ5ฐช อษ5ฐภ อส5กา อส5ติ อส5นี อส5พล อส5ฟอ อส5มิ อส5เฟ อส5แอ อส5ไพ อ1ห ออ5อว อะ5ธี 1อั 1อา อา5ค5เ อา5ฏา อา5ณั อา5ดุ อา5ดู อา2ต อา5ถร อา5นน อา5ปณ อา5มล อา5ย5ต อา5รด อา5รต อา5รบ อา3รย อา5ลป อา5วร อา5วี อา5สว อำ5ยว อำ5อว อิ5ชย อิ5ดะ อิ5ระ อิ5ศว อี5จู อี5ซู อี5ยิ อี5รุ อี5ลุ อี5ศว อี5หร อุ5กฤ อุ5กล อุ5คร อุ5ดม อุ5ดร อุ5ด้ อุ3ตรา อุ5ตุ อุ5ทร อุ5ทิ อุ5ทุ อุ5ธั อุ5บล อุ5บ๊ อุ5มง อุ5รพ อุ5ลก อุ5แว อู5คู อู5รา อู5ลา อ1เ อเป5ร อเสก5 อเส5ข อเห5ต อ1แ อ1โ อโร3 อ1ไ 3อ่อ อ่5อว อ่5อ่ อ่5โถ อ้5อว อ้5โถ อ้5โล ฮก5ฮา ฮก5ฮื ฮน5รี ฮฟ5วี ฮล5ซิ ฮล5ดิ 3ฮอล ฮา5นอ ฮา5ป่ ฮา5ร่ ฮิ5บร ฮี5บร 3ฮื้ ฮู5ลา ฮู5ล่ ฮ1เ ฮ่5กึ ะ1ก ะ1ข ะ1ค ะ1ง ะ1จ ะ1ฉ ะ1ช ะ1ซ ะ1ด ะ1ต ะตะ4 ะ1ท ะ1น ะ1บ ะ1ป ะผี4 ะ1พ ะ1ม ะ1ย ะ1ร ะ1ล ะ1ว ะ1ส ะ1ห ะ1อ ะ1เ ะ1แ ะ1โ ะ1ไ ั2 ัก5ง่ ัก5ซ้ ัก5ตบ ัก5ผ่ ัก5ฝ่ ัก5ยอ ัก5ยิ ัก5รั ัก5ร้ ัก3ล ัก5วิ ัก5ษร ัก5อิ ัก5อี ัก5อ่ ัก5ใค ัก5ใฝ ัค5ฆิ ัค5ซี ัค5สถ ัง5ถึ ัง5ศุ ัง4ส5ว ัง5อว ัง5อุ ัง5ฮี ัจ5กล ัจ5ญะ ัจ5ถร ัจ5นึ ัจ5โจ ัช5ฎา ัช5นี ัช5พย ัช5พื ัช5รา ัช5ริ ัช5สม ัช5เร ัช5แพ ัช5โญ ัญ1 ัฏ5ทุ ัฏ5สง ัฐ5ทิ ัฐ5บร ัฐ5สภ ัฐ5เค ัณ5ฏก ัณ3ฐ ัณ5ยก ัณ5เฑ ัณ5โร ัด1 ัต5ดึ ัต5ถล ัต5ถั ัต5ถิ ัต5มณ ัต5มห ัต5รา ัต5รี ัต5ฤก ัต5ลั ัต5หล ัต5หี ัท5คี ัท5ทว ัท5ธน ัท5ธิ ัท5รา ัท5ลี ัท5ลุ ัธ5ยม ัน5ฉ่ ัน2ต ัน5ตภ ัน5ตะ ัน5ตั ัน5ตา ัน5ถธ ัน5ทึ ัน5ทุ ัน5ท่ ัน4ธ ัน5ธา ัน5ธิ ัน5ผว ัน5ฝร ัน5ฝ่ ัน5ภิ ัน5ยะ ัน5ย่ ับ1 ัป5คั ัป5ผา ัป4ร ัป5ลา ัป5หง ัป5โป ัป5โห ัพ5ยอ ัพ5ยา ัพ5โพ ัพ5โห ัฟ5ฟิ ัฟ5ริ ัม4ช ัม5ลา ัม5หม ัย5มร ัย5รุ ัล5ดี ัล5ปน ัล5ปพ ัล5ปิ ัล5ฟิ ัล5มอ ัล5มุ ัล5ออ ัล5ไซ ัล5ไฟ ัว1 ัศ5นี ัศ5มี ัศ5เจ ัส5กา ัส5ดง ัส5ดน ัส5ดี ัส5ติ ัส5ถา ัส5ปู ัส5มั ัส5มิ ัส5ยิ ัส5รั ัส5ลิ ัส5วด ัส5วร าก5ถา าก5ฝร าก5ฝั า1กร า5กรร าก5รุ าก5ฮอ า3กี า1ข า4ขบ าข5บู า1ค า4คจ า4คท า4คบ า4คป าค5ปร า4คพ าค5พื า4คภ า5ครี าง5บำ าง5ฝี าง5ฟิ าง5ออ าง5อิ า1จ า4จญ า4จห าจ5หา า4จอ า4จเ าช5กร าช5คร าช5คฤ าช5ทิ า5ชนะ าช5นี าช5ปะ าช5ลั าช5วโ าช5สก าช5สี าช5อง า1ชิ า3ชี าช5เป าช5เล าช5โอ า1ซ าญ5รอ า5ฏกะ าฏ5ดน า5ฏลิ าฏ5ลี า3ฏิ าฐ5กถ าณ5คด าณ5สถ าด5ผว า3ดอ า3ดิ าด5ไท าด5ไห า1ต า4ตญ า4ตภ าต4ว า1ท า4ทธ า4ทน า5ทนะ าท5บง าท5บร าท5สก าท5หล า4ท์ า1ธ า4ธน า2ธย าธ5ยม าน5ญ่ าน5ผู าน5รว าน5รั าน5รา าน5ฤด าน5อว านุ1 าบ5จ้ าบ5ฉว าบ5ช้ าบ5ซึ าบ4พ าบ5รื าบ5ละ า3บิ าป5สร าป5ส่ าป5แช าพ5ถ่ าพ5ยน าพ5รั าพ5ลว าฟ5ต้ าฟ5ริ า3ฟิ า1ภ า4ภป า4ภล าภ5ลอ าม5ง่ าม4น4 าม5สก าม2ห าม5หม าม5หล าม5หา าย5กล าย5กอ าย5ขว าย5ข้ าย5ชน าย5ดิ าย5ด้ า5ยตน า5ยนธ า5ยนม าย5นอ า5ยนเ าย5บร าย5ผอ าย5ฝั าย5มุ าย5ม่ าย5รุ าย5ร้ าย5ลั าย5ล่ าย5วอ าย5อำ า3ยิ าย5ไห าร5กำ าร3ค าร5ชุ าร5ณู าร5ตร า5รทะ าร5ธุ าร5บั าร5ผจ าร5พร า5รภย า1รม าร5รา าร5ละ าร5วด าร5ว่ าร5หน า1ระ า1รั า1รา า1ริ า5ริก า5ริยะ า3รี า1รุ า1ล า4ลก าล5กิ า4ลค า4ลจ าล5ฎี า4ลด าล5ดี าล5ทห า4ลป าล5ปก าล5พร า4ลว าล5อุ า4ลโ า4ล์ าว5ก่ าว5ข้ า3วดี าว5ดึ าว5นี าว5บอ าว5ยอ าว5ยื า5วรณ าว5รภ าว5รา า5ว5รี าว5รุ าว5ร้ าว5ฤก า5วอน าศ5นี า3ศร าศ5เล าษ5ดื าษ5ตร าษ5รา าษ5แก าส5กา าส5ด้ าส5ต้ าส5นี าส5ปอ าส5มห า1ห าห3ก าห5มง าฬ5โร า1อ าอนา4 า1ฮ า1เ าเม5ศ า1แ า1โ า1ไ ำ1ก ำ1ค ำท4ว ำ1น ำ1บ ำ1ป ำ1พ ำ1ม ำม5รง ำม5ลา ำ1ร ำ1ล ำ1ส ำ1ห ำ1เ ำ1แ ิก5ซี ิก5ถอ ิ1กร ิก5ร้ ิ3กฤ ิก5ล้ ิก5วา ิก5ษุ ิ3กิ ิกิ5ส ิ1ข 
ิ4ขส ิข5สิ ิ1ค ิ4คต ิค5ตอ ิ4คหะ ิฆ5เน ิง5ชี ิง4สต ิง4ห ิง5หา ิง5ห้ ิง5อร ิจ5ศี ิช4น ิช5ลิ ิช5เช ิญ5หน ิญ5โญ ิด5ฉิ ิด5นี ิด5ผน ิด5รอ ิด5ระ ิด5ลั ิด5ออ ิด5อ่ ิต5ซู ิต5ถี ิต5ฟอ ิต5ลด ิต5ลา ิต5วส ิ1ติ ิ3ตุ ิท5ธั ิท5สน ิ3ธี ิน5งอ ิน5ฟร ิน5ยว ิน5ยอ ิน5ย้ ิน5ระ ิน5ริ ิน5ร้ ิ5นอบ ิน5อิ ิน5ฮุ ินู5ป ิบ5บิ ิบ5ผย ิบ5ยื ิบ5ระ ิบ5รี ิบ5ลั ิบ5ลิ ิบ5ล้ ิป4ก ิป5ทอ ิป5ผล ิ3ปร ิป5สต ิป5ฮอ ิป5โป ิป5โย ิ1พ ิ4พพ ิ4พโ ิพ5โส ิฟ5ฟอ ิ1ภ ิม5ฝี ิม5ลา ิ1มุ ิย5มิ ิร5ชร ิร5วด ิ1รั ิ1รา ิ1ริ ิ1รุ ิล5ปิ ิ1ลั ิ1ลา ิ1ลิ ิว5กิ ิว5ซี ิว5ทร ิว5บิ ิว5ยอ ิว5ริ ิว5ลิ ิว5ลึ ิว5ออ ิวา5ส ิศ5พร ิศ5ร้ ิศ5เล ิศ5แพ ิษ5ณุ ิษ5ตร ิส5กร ิส5กี ิ5สตร ิส5ติ ิส5ที ิส5นี ิส5บอ ิส5รา ิส5ริ ิส5ลา ิส5ไซ ิ1ห ิหา4 ิ1อ ิ1เ ิเน4 ิ1โ ี1ก ี4กต ี4กย ีก5ย่ ีก5ริ ีฆ5สร ีช5คณ ีซ5สถ ีด5ฆ่ ีต5กว ีต5ปฏ ี1ท ีท4น ีบ5รั ีบ5รุ ีบ5ร้ ี1ป ี1พ ี4พจ ีย5กถ ีย5รย ีย5รอ ีย5ระ ีย5รั ี5ยวน ีร5ณั ีล5จุ ี4วั ีวา4 ีษ5มา ีห5นา ี5หน้ ีห5บั ีห5มุ ีห5รา ี3หล ีห5โม ีห5ไส ี1อ ีอ4ร ีอา4 ี1เ ี1แ ี1โ ี1ไ ี่5ก่ ี่5ถ้ ี่5ปุ ี่5ปู ี่3ห ี่5โค ี่5โป ี้5กร ี้5จ้ ี้5ตะ ี้5ริ ี้5ลั ี้5ลุ ี๊5กร ี๋5จ้ ี๋5อ๋ ึก5ซึ ึก5ดำ ึก5ดื ึก5ยื ึก5ระ ึก5ลั ึก5ล้ ึก5ฮั ึด5ถื ึด5ฮั ึน5ทึ ืด5ฮา ือ5กล ือ5กอ ือ5กำ ือ5ข่ ือ5จ้ ือ5ชื ือ5ดำ ือ5ตร ือ5ถื ือ5นำ ือ5บิ ือ5ปล ือ5ปื ือ5ป่ ือ5พว ือ5พ่ ือ5ยน ือ5ยา ือ5รื ือ5ลา ือ5ล้ ือ5สอ ือ5สำ ือ5อี ุก5งอ ุก5ฉก ุก5ซ่ ุก5ดิ ุก5ผา ุก5รา ุก5รุ ุก5ละ ุก5ลี ุก5ล้ ุก5อี ุก5ฮื ุข5นา ุข5ปา ุข5ภั ุข5ภา ุข5ลั ุข5ศา ุข5ศึ ุข5เด ุค5ทอ ุ3คน ุง5ถุ ุจ5ลิ ุจ5หน ุช5รา ุช5เช ุญ5จน ุญ5ฤท ุญ5แจ ุฎ5ฐั ุฑ5พ่ ุณ5ค่ ุณ5ฑก ุณสม5 ุณ5หญ ุณ5หา ุณ5หิ ุณูป5 ุด5ผา ุด5ผ่ ุด5ลอ ุด5ลุ ุด5อู ุต5กว ุต5กิ ุต5ซอ ุต5ตก ุ5ตระ ุ5ตริ ุต5ลุ ุ3ทก ุท5ธั ุ5ทริ ุท5ลุ ุท5โธ ุน5ทร ุน5ผล ุน5รอ ุบ5งิ ุบ5ซิ ุบ5บิ ุบ5ผล ุบ5ยิ ุบ5อิ ุป5กร ุป5กิ ุป5จา ุป5ถั ุป5ทา ุป5ยุ ุป3รา ุ5ปริ ุ4ปส ุป5สง ุป5สร ุป5ฮา ุป5โภ ุป5โล ุพ5พา ุพ5ภิ ุภ5ชล ุภ5เค ุม4น ุม5นุ ุม5รุ ุม5หย ุย5ช่ ุย5ฝ้ ุ1ร ุร5ข่ ุ4รค ุ4รฉ ุ4รช ุ4รท ุ4รธ ุ4รบ ุ4รพ ุ4รภ ุ5รภี ุ4รย ุ4รร ุ4รล ุ4รว ุ4รศ ุ4รส ุ4รอ ุ4รแ ุ4รโ ุล5จอ ุล5ชี ุล5ธิ ุล5มุ ุล5สต ุล5สแ ุ3ลา ุ3ลิ ุศ5โล ุษ5จี ุษ5ฎี ุษ5ปร ุ4ษย ุษ5รา ุษ5ร้ ุษ5เพ ุส5รา ุ5สละ ุส5ลิ ุส5วา ุ1ห ุห5กล ุห5นา ุ4หย ุห5ยา ุ4หเ ุห5เท ุห5เส ุ4หโ ุห5โย ุ1เ ุ1โ ุ๊5ต๊ ูก5วั ู1ช ูญ5หา ูญ5เป ูญ5เส ูด5บึ ูด5รี ูต5รู ูธ5เร ูบ5ไล ูป4ก ูป5ฌา ูป5ถ่ ูป5ทร ูป5พร ูป5ร่ ูป5แบ ูป5โฉ ูฟ5วี ู2ม ู5มิน ูร5ข่ ูร4ณ ู5รณภ ู5รณม ู5รณะ ู5รณาก ูร4พ ู5รพะ ู5รพา ูร4ม ูล5กร ูล5ค่ ู3ลั ูส4ว ู1เ ู1โ ู่1 ู้1 ู๊5ตึ ู๋5กร ู๋5จี ู๋5อี เ2 เก5ยู เก5วั เก5ศว เก5อิ เค5ซอ เค5มี เค5ศว เจ5ดี เจ5นี เ4จร เจ5ลิ เจ5โต เซ5ทิ เซ5นอ เซ5รุ เซ5แค เด5ซิ เด5บิ เด5รั เด5ลา เด5ลิ เด5ลี 2เตช เต5ปุ เต5มี เต5มู เต5ริ เต5ลุ เต5ศว เต5หะ เถ5รา เท5กร เท5คร เท5คว เท5โว เท5โศ เน4ต เน5ติ 4เนย เน5ระ เน5รั เน4ส เน5สา เน5เว เบ5ต้ เบ5บี เบ5ริ เบ5รุ เบ5ลี เป5ตอ เป5สก เป5สล เพ5ชุ เพ5ทุ เพ5สล เพ5โท เฟ5อี เภ5ตร เภ5ทุ เม5ฆิ เม5ดิ เม5ลา เร5กะ เร5ซิ เร5มอ เร5รว เร5วด เล5กร เล5คอ เล5ดี เล5วร เล5วู เล5หล เล5ฮุ เลิ4 เว5ก้ เว5ทิ เว5ฬุ เส5ฉว เส5นีย์ เส5รี เส5วก เส5วน เส5แส เห5มั เห5ยง เห5ระ เห5รั เห5ศว เห5ศั เห5สั เฬ5วร เอ5กว เอ5ซิ เอ5ธิ เอ5ฬก เฮ5ละ เฮ5ลิ เฮ5โม เฮ5โร แก5วั แค5รอ แค5ริ แค5ลอ แค5ลิ แค5แต แค5แส แช5บ๊ แช5เช แซ5ยิ แด5รี แต5แต แน2 แป5ซิ แ4ปร 3แพท แฟ5รี แ4ฟ้ แม2 แม5ชี แม5รี แม5เร แม่3 แอ5นะ โก4ฐ โก5ลอ โก5ลา โก5ลิ โก5วา โก5วี โก5ฮา โข5ทั โข5ภิ โข5เภ โข5โล โค5ตม โค5ติ โค5มู โค5ม่ โค5ริ โค5ลอ โค5ลั โค5ออ โค5อะ โค5แท โค5ไซ โจ5ปก โฉ5เบ โช5ดึ โช5ห่ โซ5กร โซ5นี โซ5ฟิ โซ5ยู โซ5ลู โซ5สเ โญ4ช โญ5ปว โด5จี โด5นี โด5รา โด5ลิ โต5กร โต5รอ โต5รา โต5ริ โต5ลิ โท5กร โท5คอ โท5พล โท5รอ โท5แอ โธ5ทน โธ5ปก โธ5วน โธ5เฟ โน5ทุ โน5ปจ โน5รม โน5รา โบ5ชุ โบ5ซอ โบ5ต้ โบ5รอ โบ5รั โบ5รา โบ5ลิ โบ5ล่ โบ5ไฮ โป5กส โป5ลิ โป5แล โป5โป โป5โล โพ5ซิ โพ5ทะ โพ5ระ โพ5ลา โพ5ลิ โพ5ลี โพ5หา โพ5แท โพ5ไซ โฟ5กร โฟ5นี โภ5คิ โภ5ไค โม5ฆี โม5ดู โม5ร็ โม5หา โม5ฮั 
โย5ถิ โร5กะ โร5คิ โร5งั โร5ชิ โร5ธนะ โร5รา โร5ล่ โรส4 โร5สเ โร5หน โร5อี โร5ฮิ โร5แม โร5ไล โล5กร โล5กิ โล5กี โล5จน โล5ปุ โล5มก โล5มอ โล5รา โล5วะ โล5หิ โว5นอ โศ5ธน โศ5ภิ โส5กร โส5ติ โส5ธน โส5ภิ โส5ลิ โส5วร โส5หุ โส5โค โห5ฐา โห5รส โห5ระ โห5รา โห5สิ โห5ฬา โอ5กิ โอ5คล โอ5ค็ โอ5ดี โอ5รส โอ5ละ โอ5สถ โอ5อิ โฮ5โล 3ใช้ 1ให ไก5ลา ไก5วั ไข5ข้ ไข5คว ไข5มั ไข5สั ไข5สื ไค5ศว ไช5น่ ไช5ศว ไซ5ดอ ไซ5บอ ไซ5บี ไซ5ปร ไซ5ออ ได5ฟุ ได5ฟู ได5ลิ ได5ออ ไท5ฟอ ไท5รอ ไท5แท 3ไนย ไป5ริ ไพ5ชย ไพ5ธอ ไพ5รั ไพ5ริ ไพ5ลิ ไพ5หา ไพ5โร ไพ5โอ ไฟ5แช ไฟ5แน ไภ5ริ ไม5ถิ ไม้1 ไล5บร ไล5บี ไว5กิ ไว5รั ไว5อะ ไห5รณ ไห5ศว ไห5หม ไห5หล ไอ5กร ไอ5ซี ไอ5ดอ ไอ5ติ ไอ5พอ ไอ5พ็ ไอ5ศว ไอ5ศุ ไอ5ศู ไอ5ออ ไฮ1 ็ก5ซี ็จ5ขบ ็จ5สร ็ด5ลอ ็ด5อร ็ด5อึ ็น5ฉ่ ็น5ทร ็น5รอ ็น5วู ็น5อย ็น5อ้ ็บ5ด้ ็ป5ท็ ็ม5หม ่ก5ลั ่1ค ่ง5ริ ่ง5อร ่ง5อำ ่ง5อ่ ่4ฉี ่น5ง่ ่น5ฉ่ ่น5ทะ ่น5มื ่4นย ่น5ยน ่น5ย่ ่น5รม ่ม1 ่ม5พว ่ย5กะ ่ย5ฉุ ่ย5รา ่ย5ร่ ่ว5ช้ ่ว5ถึ ่ว5ยว ่ว5ไห ่อ5กร ่อ5กว ่อ5กะ ่อ5กี ่อ5ก้ ่อ5ข่ ่อ5ตร ่อ5ตะ ่อ5ต้ ่อ5ถื ่อ5บื ่อ5ผส ่อ5มว ่อ5ม่ ่อย3 ่อ5ยอ ่อ5ย่ ่อ5รอ ่อ5ร่ ่อ3ล ่อ5ว่ ่อ5สร ่อ5ฮั ่อ5ฮ่ ่า5กล ่า5ช้ ่า5ดง ่า5ด้ ่า5ฝื ่า5พร ่า5มง ่า5รึ ่า5ร้ ่าว3 ่ำ5ชอ ่ำ5ช้ ่ำ5ต้ ่ำ5ต๊ ่ำ5ไห ่1เ ่1แ ้ก5อ้ ้ง5ถ่ ้ง5ฝุ ้น5งู ้น5ฉบ ้น5ฉ่ ้น5ทะ ้น5ทุ ้น5ท้ ้น5รุ ้น5ร่ ้ม5งว ้ม5ฉุ ้ม5น้ ้ม5ยิ ้ม5ละ ้ม5ลุ ้ม5อล ้ย5กล ้ย5งช ้ย5ล่ ้ย5อ้ ้ย5ใบ ้ว5รอ ้1ห ้อ5กร ้อ5กล ้อ5คร ้อ5คู ้อ5งอ ้อ5ฉี ้อ5ดึ ้อ5ด้ ้อ5ต๊ ้อ5ถอ ้อน3 ้อ5ผ้ ้อ5ฝั ้อ5ฟื ้อ5มู ้อ5ระ ้อ5ร่ ้อ5อึ ้อ5ฮื ้า5จอ ้า5ชื ้า5ชู ้า5ช่ ้า5ช้ ้า5ดี ้า5ถิ ้า5ถึ ้า5บ่ ้า5บ้ ้า5บ๋ ้า5ปี ้า5ผา ้า5ฝร ้า3พ ้า5มุ ้า5ว่ ้า5สม ้า5สร ้า5สล ้ำ1 ้1เ ้1แ ๊ก5ซอ ๊ก5ริ ๊ก5ลุ ๊ก5ฮว ๊ป5ซี ๊ย5ก่ ๋ย5อิ ๋อ5ด๋ ์ค5สเ ์ค5แล ์1บ ์1พ ์1ร ์1เ ์1แ ์1โ .ก6 .ข6 .ฃ6 .ค6 .ฅ6 .ฆ6 .ง6 .จ6 .ฉ6 .ช6 .ซ6 .ฌ6 .ญ6 .ฎ6 .ฏ6 .ฐ6 .ฑ6 .ฒ6 .ณ6 .ด6 .ต6 .ถ6 .ท6 .ธ6 .น6 .บ6 .ป6 .ผ6 .ฝ6 .พ6 .ฟ6 .ภ6 .ม6 .ย6 .ร6 .ฤ6 .ล6 .ฦ6 .ว6 .ศ6 .ษ6 .ส6 .ห6 .ฬ6 .อ6 .ฮ6 6ก. 6ข. 6ฃ. 6ค. 6ฅ. 6ฆ. 6ง. 6จ. 6ฉ. 6ช. 6ซ. 6ฌ. 6ญ. 6ฎ. 6ฏ. 6ฐ. 6ฑ. 6ฒ. 6ณ. 6ด. 6ต. 6ถ. 6ท. 6ธ. 6น. 6บ. 6ป. 6ผ. 6ฝ. 6พ. 6ฟ. 6ภ. 6ม. 6ย. 6ร. 6ล. 6ว. 6ศ. 6ษ. 6ส. 6ห. 6ฬ. 6อ. 6ฮ. 6ก์. 6ข์. 6ฃ์. 6ค์. 6ฅ์. 6ฆ์. 6ง์. 6จ์. 6ฉ์. 6ช์. 6ซ์. 6ฌ์. 6ญ์. 6ฎ์. 6ฏ์. 6ฐ์. 6ฑ์. 6ฒ์. 6ณ์. 6ด์. 6ต์. 6ถ์. 6ท์. 6ธ์. 6น์. 6บ์. 6ป์. 6ผ์. 6ฝ์. 6พ์. 6ฟ์. 6ภ์. 6ม์. 6ย์. 6ร์. 6ล์. 6ว์. 6ศ์. 6ษ์. 6ส์. 6ห์. 6ฬ์. 6อ์. 6ฮ์. 6กิ์. 6ขิ์. 6ฃิ์. 6คิ์. 6ฅิ์. 6ฆิ์. 6งิ์. 6จิ์. 6ฉิ์. 6ชิ์. 6ซิ์. 6ฌิ์. 6ญิ์. 6ฎิ์. 6ฏิ์. 6ฐิ์. 6ฑิ์. 6ฒิ์. 6ณิ์. 6ดิ์. 6ติ์. 6ถิ์. 6ทิ์. 6ธิ์. 6นิ์. 6บิ์. 6ปิ์. 6ผิ์. 6ฝิ์. 6พิ์. 6ฟิ์. 6ภิ์. 6มิ์. 6ยิ์. 6ริ์. 6ลิ์. 6วิ์. 6ศิ์. 6ษิ์. 6สิ์. 6หิ์. 6ฬิ์. 6อิ์. 6ฮิ์. 6กุ์. 6ขุ์. 6ฃุ์. 6คุ์. 6ฅุ์. 6ฆุ์. 6งุ์. 6จุ์. 6ฉุ์. 6ชุ์. 6ซุ์. 6ฌุ์. 6ญุ์. 6ฎุ์. 6ฏุ์. 6ฐุ์. 6ฑุ์. 6ฒุ์. 6ณุ์. 6ดุ์. 6ตุ์. 6ถุ์. 6ทุ์. 6ธุ์. 6นุ์. 6บุ์. 6ปุ์. 6ผุ์. 6ฝุ์. 6พุ์. 6ฟุ์. 6ภุ์. 6มุ์. 6ยุ์. 6รุ์. 6ลุ์. 6วุ์. 6ศุ์. 6ษุ์. 6สุ์. 6หุ์. 6ฬุ์. 6อุ์. 6ฮุ์. 6ะ 6า 6ๅ 6ำ7 6ิ 6ี 6ึ 6ื 6ุ 6ู แ6 โ6 5ไ6 7ใ6 6็ 6่ 6้ 6๊ 6๋ 6์ 6ํ 6ฺ 6๎ เ6ข เ6ฃ เ6ค เ6ฅ เ6ฆ เ6ง เ6จ เ6ฉ เ6ช เ6ซ เ6ฌ เ6ญ เ6ฎ เ6ฏ เ6ฐ เ6ฑ เ6ฒ เ6ณ เ6ด เ6ต เ6ถ เ6ท เ6ธ เ6น เ6บ เ6ป 7เ6ผ เ6ฝ เ6พ เ6ฟ เ6ภ เ6ม เ6ย เ6ร เ6ล เ6ว เ6ศ เ6ษ เ6ส เ6ห เ6ฬ เ6อ เ6ฮ ช6วา. ช6ไ ธ6ไน ม6ไห ส6ไต เลส7ไต ส6ไน ส6ไบ ส6ไป ส6ไล บ6ทคว ม6วก ม6วน ม6วด ม7วดี ม6วย ะม6วง ล7ชน ัต5ถุ ัต6ถุ์ 6ตร. ธา6ตุ. บุ6ตร. ค6รู ฮิบ6รู ฮีบ6รู ส6ภา ส7ภาร เส7ภา โส7ภา ผ6วา น6คร. .เห6ยง เปี่6 เขี้6 ม6ณี คาม7ณี .รม7ณี .รัม7ณี หม7ณี ง6วด ง6วน วัง7วน ง6วย มง6วง อย6อด พ6ญา จุ6รณ ฤ6ชา .ฤ6ทัย พรร6ดิ สวา6ดิ อ6ริ. จน6ที. 
ธค6ยา นิม6นา ย์ม6นา า7ณะ ิ7ณะ ุ7ณะ ณ7ณะ ก7ณะ ท7ณะ ล7ณะ ุษ7ณะ ฤษ7ณะ รป7ณะ หม7ณะ สม7ณะ ลว7ณะ รว7ณะ ร5ณะ ณร6สี ก6นะ ยก7นะ ค7นะ ย7นะ ภว7นะ มท7นะ รต7นะ ลว7นะ วจ7นะ วท7นะ วส7นะ ศม7นะ ภช7นะ ไช7นะ าลป7นะ รรธ7นะ สธ5นะ โสธ6นะ สว5นะ เสว6นะ สาว7นะ ัจ7นะ ัช7นะ ัฏ7นะ ัฒ7นะ ัต7นะ ัท7นะ ัป7นะ ัส7นะ ุจ7นะ อาส7นะ ุ7นะ 5ผี 7จำ 5งำ ห6งำ น7รำ ย7รำ ร7รำ โค7รำ ไพ7รำ น7ยำ ม7ยำ 5งง. ห6งง น7งก 5ชน. เ6ชน โ6ชน 5กร. ั6กร า7นะ ถ7ระ า7ยก. า7ยน. า7ฐี า7นี า7วี ป5โ ป6โย ป6โภ วิป7โย อุป7โภ ศ7นะ รร7มะ ต5ถี ุต6ถี 5บท. ส6บท 5บถ. ข6บถ ส6บถ 7ฟู 7ษุ 5ตะ. ค6ตะ ร6ตะ สร7ตะ า7มี มิ7ผ า7กิ า7กล ิ7กล. ์7กล 5นำ ห6นำ รี7ผ 7ณุ 5นี. ห6นี ฉ6นี าร6นี วีช6นี สส6นี มท6นี รม6นี น7ยิ ิ5ลี ุ5ลี า7ลี โค7ลี โม7ลี ท7ลี ร7ลี ก7ยะ ค7ยะ ป7ยะ ท7ยะ ธ7ยะ น7ยะ ษ7ยะ า7ยะ ิ7ยะ คี7ยะ ฆี7ยะ ณี7ยะ นี7ยะ รี5ยะ เปรี6ยะ มโห5 ิ7รี ี7รี ู7รี หา7รี ม7รี. น5รี. เต7รี. ช7รี. ถ7รี ภ7รี ภม7รี โม7รี ภุม7ร พ7รี. เว7รี 5ผล 5ดล. 5รส. ก6รส จ6รส โค6รส ท6รส พ6รส ด6รส 5คน. ณ7หา ฤๅ5 ฤา5 .ยี่7 า7วะ เท7พี เท7วี บรร7จ บรร7ถ บรร7พต 5ทก. 5ดร. น7ทร. า7ทร. โค7ทร. โล7ทร. โส7ทร. 7อู. 5พล. ไพร่7 5ศก. อัฐ5 อัฐ6ม อัฐ7มี ี7วี ู7วี ถ7วี. ส7วี. ฏ7วี. น7ตี ร7ตี อ7ตี า7ตี ิ7ตี ู7ตี า7สี ณ7สี ห7สี เว7สี ู7สี ิ7สี ก7สี โบ7ลา ู7ลา อจ7ลา เว7ลา บิว7ลา มข7ลา เอ7ลา ี7ลา โร7ลา โอ7ลา โซ7ลา ิ7กะ ุ7กะ อ7กะ นว7กะ ิณ7กะ เภ7กะ ัย7กะ ิย7กะ รธ7กะ ัฏ7กะ ัฒ7กะ ิช7กะ ศต7กะ มล7กะ 7ทุ. โซ6ร ธ6นู ัส7ดุ. ร7คต ดง7คต 5กง. เ6กง 7ฎก ณ7มี ว7มี ศ7มี ู7มี ี7ติ รุ7ติ สุ7ติ ฮ7ติ อร7ติ วีส7ติ ติงส7ติ คุป7ติ มุต6ติ ภัต6ติ ก7ดี ต7ดี พ7ดี ม7ดี ย7ดี ศ7ดี อ5ดี า7ดี ี7ดี ุ7ดี ุว7ดี ดิบ7ดี นัก7 กุณ5 กุณ6ฑ์ 7ซี. 5ที. จน6ที ี7รา ู7รา ์7รา ิต7รา ม7รา ย7รา .มก7รา รบ7รา ลิก7รา เห7รา. 7กฎ. 7กฏ. 5หะ ค6หะ นิค7หะ เค7หะ ท6หะ เท7หะ ู7หา ฬ7หา ค7หา เน7หา ่7หา 5มะ ร6มะ ห6มะ ต6มะ 5หู 5ดำ ส6ดำ 7คำ 5สะ ว6สะ 5ฐะ ส6ฐะ 7ธะ 5พี. ร6พี ทร7พี ปฐ7วี ิ7ดา ษ7บ ษ7ป ิ7ระ ี7ระ ู7ระ ช5ระ ิต7ระ ทห7ระ ท7ระ. ุก5ระ. สว7ระ ัส7ระ ิส7ระ เป7ระ อ7ยา. เก7ยา รร7ยา สา7วก ิ7ธิ ุท7ธิ. ิท5ธิ. .สิท6ธิ. บุริมสิท6ธิ. ไกรสิท6ธิ. ป7ธิ ขัดสมา6ธิ พยา6ธิ. 5ษี. ด6นู ิ7วะ ี7วะ ุ7วะ ี7วก ย7วะ เท7วะ ไท7วะ ัท7วะ าช7วะ ไศ7วะ 7ถะ 7ษะ 5พร. 5ผง 5ธี า7ชะ ิ7ชะ ร5ชะ ส7ชะ โอ7ชะ 5ฆะ 5ฟะ า7ฟี ิ7ถี ร7ถี 5ฮา 5ญี 5ผา 5หิ. สิน7ธพ สิน7ธุ. สิน7ธู 5ชู 5ศะ ิ7ละ ุ7ละ ู7ละ ย7ละ ด7ละ .วส7ละ อเจ7ล เต7ละ ่7ละ น7ทะ ท7ทะ ส7ทะ น7ตุ. รร6ตุ มา7ตฤ ิ7รพ า7รพ. ไก7รพ 5ศุ. า7ถา า7สพ พ7สพ ุ7ขี 7สอ. า7ดะ 5บะ. 5ยี. ห6ยี 5กี. 5หก. ง7อร. ม7อร. ี7วร ส7วร. พู7นท 5จร. โ6จร. 7ศพ. โป7ลี 7ภพ. 7นพ. 7ณพ. า7รก. ทก7รก ย7รก. ยว7รก. 5มล. ุ5บล. โล7บล. 5ชล. 5ชก. 7โพ 5ณู 7ปี. า7บี. 5ฏะ. า7ฬี 5ปะ. ฉ6ปะ ส6ปะ ู7ลู 5ตู. 5ยู. ิ7ชิ 7ฆี. ิ7จี ี7จี ุ7จี ู7จี เว7จี 5ศี. 5มน. 5ยอ. ผ6ยอ. 5สง. 7สร. 5ดก. ส6ดก 7โก. ก7ฝ า7มก. 5ซอ า7ขะ ู7ขะ ส5ขะ ร7ษา 5ภะ ศ7ภ ิ7ลก ุ7ฎี ศา5ข 5สา. ั6สา 7ซู 5ษก. ษ7ฐี 5ดม. ส6ดม ด7ลม. ส7ลม. ว7ลม. ี7ลม. 5ศล. นิ7ยต 7งู 5จะ. า7สก. โป7สก 5ยศ. 5ธก. 5กบ. 7คู. ส5มา. 5แล. 5พก. โส7ภ รร6ดิ. า7วก. น7นร. 5จอ. 5จบ. 5คบ. 5ฉล. ม7รม อบ7รม ิ7รม. ี7รม. 5ซน. 5ดอ. 5กิ. ซู7ซุ ซู7ฮก 5บส. น7รน. ตก7ลง ม7ตน ตัว7ตน ี7วง ศ7วง. แตร7วง แวด7วง า7ฑู 5หด. อบ7นบ นา7คร. ี7ฑา ู7ดู า7รภ. า7ฝ ล7รบ. ว7รบ. อ7รบ. า7รณ. น7ยง ม7ยง ุ7ยง ิ7ยง ิ7ยน หา7พน า7งิ ช7รถ. น7รถ. ส7รถ. ัน7ธร. มณ7ฑก มณ7โฑ มร7กต มร7ฑป ยอด7อก โล่ง7อก ยืด7อก ห7ห 5ทด. ว7นม. ทพ7นม. โค7นม ษ7ฎร. ิ7ปุ ิ7ปู ี7รอ. ย7ลำ อ7ลำ ้7ลำ น7ทม. ป7ทม. วก7วน อล7วน ิ7จล. ช7ญะ ี7ข ศีล7 5ธม. สม7รด สัก7วา สัป7ด สัป7ท า7สม. อ7สม. า7นล. ี7รุ ู7รุ เน7รุ ง7หล สีห7นุ 5ภร. 5จด. บ7ยก. ดิ7ศร ร7ศร อพ7ยพ ร7ชร. รส7กา ลส7กา อาจ7อง ี7มู อึง7อล ุ7ชุ ุ7สภ. เก7ชา เก7ศา ช7ตก. บ7ตก. เข7ฬะ ห7ณี อ7ปน. ย7ชม. เบื้อง7 5คะ ง7ออ. อ7ออ. เรือ7ธ เรือ7บ เลี้ยว7 5กก. เ6กก อ7ขอ. า7กอ. แด7วู บ7ยล. โฉ7เก โด7มร โต7มร 7โผ โท7โส ้7ปด. 7คี. โย7นก. โส7มม 7ฬส. ต7ถิ 7โฮ ใจ7 5ฟง ไช7โย 5พต. กรร7กศ ล7บก. ศ7ยป. า7นน. ุ7ฎา ู7ฏา า7มอ. ท7โท ุ7ทส จ่า7ร ฬ7หี า7ฒะ ธต7รฐ ท7คล. 
ต7ถร. ิ7ฐิ ป7ผะ พฤ7ษภ. ิ7ธุ า7ฬก. ห7สิ ฏ7ฏิ. ษ7ฏิ. ศิษ7ฎิ ษ7ฏี 5ษส. ิ7ปิ ู7ริ. ฑ7ฑุ ษ7ฏุ า7ตา ว7ตก ง7ตก เก6ตุ. ส7ตุ ลิ7บง ฮ7โ 7อุ.",
+ ["length"]=53247,
["minhyphenmax"]=1,
["minhyphenmin"]=1,
- ["n"]=4275,
+ ["n"]=4286,
},
["version"]="1.001",
}
\ No newline at end of file
diff --git a/Master/texmf-dist/tex/context/patterns/lang-th.pat b/Master/texmf-dist/tex/context/patterns/lang-th.pat
index 9e9b40ccae1..7e37831da78 100644
--- a/Master/texmf-dist/tex/context/patterns/lang-th.pat
+++ b/Master/texmf-dist/tex/context/patterns/lang-th.pat
@@ -331,6 +331,7 @@
ชร5ฤก
ชร5หล
ชร5หึ
+ชร5อุ
ชว4โ
ชอง4
1ชั
@@ -361,14 +362,13 @@
ช้5สอ
ช้5ได
ซน5ทร
-ซฟ5ตี
ซ5ราม
ซล5มอ
+1ซั
1ซา
ซา5ชู
ซา5มู
ซิ5ตร
-ซิ5ตี
ซิ5ฟิ
ซิ5แล
ซี5ดี
@@ -579,13 +579,13 @@
ติ5มส
ติ5มอ
ติ5ยม
-ติ5อิ
4ติ์
ตี5ขล
ตี5คู
ตี5ตื
ตี5รว
ตี5ลั
+3ตี้.
ตุ5ตถ
ตุ5ทส
ตุ5ป่
@@ -619,7 +619,6 @@
3ถุน
ถ่5ถอ
ถ่5ถา
-ทก5ซั
4ทกา
ทค5ติ
ทค5นี
@@ -875,6 +874,7 @@
บา5รน
บา5รอ
บา5สม
+บิ5ชอ
บี5คิ
บี5ร่
1บุ
@@ -955,6 +955,7 @@
พจ5นี
พช5ฉล
พช5รา
+พท5ริ
พทัก4
พน5ทะ
พ4นั
@@ -1130,7 +1131,6 @@
มิ5ลำ
มิ5แพ
มี5ขม
-มี5หน
3มืด
1มือ.
มุ5ทะ
@@ -1299,7 +1299,9 @@
รร5แส
รร5ไก
รร5ไต
+รล5ออ
รศ5นี
+รษ5ฐิ
รษ5ตร
ร1ส
ร4สก
@@ -1336,7 +1338,6 @@
4ริยจ
4ริยย
4ริร
-ริ5อ่
ริ5แล
4ริ่
รี5คู
@@ -1377,8 +1378,8 @@
ร่5หล
ร์5กิ
ร์5กี
+ร์5ดิ
ร์5ติ
-ร์5ตี
ร์5ตู
ร์5ทิ
ร์5ฟอ
@@ -1423,6 +1424,7 @@
1ลักษ
ลา5กล
ลา5นี
+ลา5บร
ลา5ป๋
ลา5พอ
ลา5มี
@@ -1433,6 +1435,7 @@
ลา5ส้
ลิ5ก่
ลิ5จู
+ลิ5ซิ
ลิ5ซึ
ลิ5ตอ
ลิ5นอ
@@ -1441,7 +1444,6 @@
ลิ5บา
ลิ5ฟอ
ลิ5มู
-ลิ5อิ
ลิ5ไท
ลิ5ไล
ลี5ตะ
@@ -1489,6 +1491,7 @@
วน5รว
วน5ร่
วน5อิ
+วน5อุ
วบ5ยอ
วบ5รว
วบ5รั
@@ -1519,6 +1522,7 @@
วันต5
วันท4
1วา
+วา5ดะ
วา4ต
วา5ตก
วา5ติ
@@ -1573,6 +1577,7 @@
ว้5ลา
ว์5ลิ
ศ1จ
+ศน5อุ
ศพิ4
3ศรี
ศ2วร
@@ -1760,7 +1765,6 @@
ฬา5มณ
ฬา5รึ
อก5ซอ
-อก5ซั
อก5ถล
อก5รณ
อก5รี
@@ -1782,6 +1786,7 @@
อง5ฟอ
อง5ฟุ
อง5ระ
+อง5อุ
อง5อ้
อด5ถอ
อด5น่
@@ -1790,11 +1795,15 @@
อด5รั
อด5อย
อด5ออ
+อด5อุ
อด5อ้
อ3ดิ
อต5ดอ
อต5ด็
+อต5ไว
อ1ท
+อ4ทค
+อท5คอ
อน5ง้
อน5ดร
อน5ทำ
@@ -1808,8 +1817,7 @@
อบ5ช้
อบ5ถา
อบ5บี
-อบ5อว
-อบ5อ้
+อบ3อ
อบ5ไล
อป5กิ
อป5ติ
@@ -1852,7 +1860,6 @@
อ3ร้
อร์1
อล5จี
-อล5ซั
อล5นี
อล5ฟ่
อล5มอ
@@ -1873,7 +1880,6 @@
อส5แอ
อส5ไพ
อ1ห
-3ออน
ออ5อว
อะ5ธี
1อั
@@ -1910,7 +1916,6 @@
อี5ลุ
อี5ศว
อี5หร
-1อุ
อุ5กฤ
อุ5กล
อุ5คร
@@ -1957,6 +1962,7 @@
3ฮอล
ฮา5นอ
ฮา5ป่
+ฮา5ร่
ฮิ5บร
ฮี5บร
3ฮื้
@@ -2018,6 +2024,7 @@
ัง5ศุ
ัง4ส5ว
ัง5อว
+ัง5อุ
ัง5ฮี
ัจ5กล
ัจ5ญะ
@@ -2314,6 +2321,7 @@
าล5ปก
าล5พร
า4ลว
+าล5อุ
า4ลโ
า4ล์
าว5ก่
@@ -2443,7 +2451,6 @@
ิบ5ลิ
ิบ5ล้
ิป4ก
-ิป5ซั
ิป5ทอ
ิป5ผล
ิ3ปร
@@ -2479,6 +2486,7 @@
ิว5ริ
ิว5ลิ
ิว5ลึ
+ิว5ออ
ิวา5ส
ิศ5พร
ิศ5ร้
@@ -2499,6 +2507,7 @@
ิส5ไซ
ิ1ห
ิหา4
+ิ1อ
ิ1เ
ิเน4
ิ1โ
@@ -2556,7 +2565,6 @@
ี่5โป
ี้5กร
ี้5จ้
-ี้5ซั
ี้5ตะ
ี้5ริ
ี้5ลั
@@ -2798,7 +2806,6 @@
เ4จร
เจ5ลิ
เจ5โต
-เซ5ซั
เซ5ทิ
เซ5นอ
เซ5รุ
@@ -2998,6 +3005,7 @@
โม5ดู
โม5ร็
โม5หา
+โม5ฮั
โย5ถิ
โร5กะ
โร5คิ
@@ -3065,6 +3073,7 @@
ไซ5บอ
ไซ5บี
ไซ5ปร
+ไซ5ออ
ได5ฟุ
ได5ฟู
ได5ลิ
@@ -3087,7 +3096,6 @@
ไภ5ริ
ไม5ถิ
ไม้1
-ไร5ตี
ไล5บร
ไล5บี
ไว5กิ
@@ -3106,8 +3114,8 @@
ไอ5ศว
ไอ5ศุ
ไอ5ศู
+ไอ5ออ
ไฮ1
-็ก5ซั
็ก5ซี
็จ5ขบ
็จ5สร
@@ -3625,6 +3633,7 @@
ท7ณะ
ล7ณะ
ุษ7ณะ
+ฤษ7ณะ
รป7ณะ
หม7ณะ
สม7ณะ
@@ -3810,6 +3819,7 @@
ร7ตี
อ7ตี
า7ตี
+ิ7ตี
ู7ตี
า7สี
ณ7สี
@@ -4279,4 +4289,5 @@
เก6ตุ.
ส7ตุ
ลิ7บง
-ฮ7โ}
\ No newline at end of file
+ฮ7โ
+7อุ.}
\ No newline at end of file
diff --git a/Master/texmf-dist/tex/context/sample/cervantes-es.tex b/Master/texmf-dist/tex/context/sample/cervantes-es.tex
new file mode 100644
index 00000000000..153797023ed
--- /dev/null
+++ b/Master/texmf-dist/tex/context/sample/cervantes-es.tex
@@ -0,0 +1,6 @@
+En un lugar de la Mancha, de cuyo nombre no quiero acordar-me, no ha
+mucho tiempo que vivía un hidalgo de los de lanza en astillero, adarga
+antigua, rocín flaco y galgo corredor. Una olla de algo más vaca que
+carnero, salpicón las más noches, duelos y quebrantos los sábados,
+lantejas los viernes, algún palomino de añadidura los domingos,
+consumían las tres partes de su hacienda.
diff --git a/Master/texmf-dist/tex/context/sample/quevedo-es.tex b/Master/texmf-dist/tex/context/sample/quevedo-es.tex
new file mode 100644
index 00000000000..166b0328fb8
--- /dev/null
+++ b/Master/texmf-dist/tex/context/sample/quevedo-es.tex
@@ -0,0 +1,19 @@
+\startlines
+Un soneto me manda hacer Violante
+que en mi vida me he visto en tanto aprieto;
+catorce versos dicen que es soneto;
+burla burlando van los tres delante.
+
+Yo pensé que no hallara consonante,
+y estoy a la mitad de otro cuarteto;
+mas si me veo en el primer terceto,
+no hay cosa en los cuartetos que me espante.
+
+Por el primer terceto voy entrando,
+y parece que entré con pie derecho,
+pues fin con este verso le voy dando.
+
+Ya estoy en el segundo, y aun sospecho
+que voy los trece versos acabando;
+contad si son catorce, y está hecho.
+\stoplines
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
index 4a46fbb0782..a304ab6aaa0 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-gen.lua
@@ -89,6 +89,7 @@ local remapper = {
fea = "font feature files",
pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ afm = "afm",
}
function resolvers.findfile(name,fileformat)
@@ -117,6 +118,11 @@ end
resolvers.findbinfile = resolvers.findfile
+function resolvers.loadbinfile(filename,filetype)
+ local data = io.loaddata(filename)
+ return true, data, #data
+end
+
function resolvers.resolve(s)
return s
end
@@ -149,19 +155,29 @@ do
local cachepaths = kpse.expand_var('$TEXMFCACHE') or ""
- -- quite like tex live or so
+ -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex)
- if cachepaths == "" then
+ if cachepaths == "" or cachepaths == "$TEXMFCACHE" then
cachepaths = kpse.expand_var('$TEXMFVAR') or ""
end
- -- this also happened to be used
+ -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex)
- if cachepaths == "" then
+ if cachepaths == "" or cachepaths == "$TEXMFVAR" then
cachepaths = kpse.expand_var('$VARTEXMF') or ""
end
- -- and this is a last resort
+ -- and this is a last resort (hm, we could use TEMP or TEMPDIR)
+
+ if cachepaths == "" then
+ local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+ for i=1,#fallbacks do
+ cachepaths = os.getenv(fallbacks[i]) or ""
+ if cachepaths ~= "" and lfs.isdir(cachepaths) then
+ break
+ end
+ end
+ end
if cachepaths == "" then
cachepaths = "."
@@ -238,6 +254,18 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data = false
local luaname, lucname = makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then
+ -- in case we used luatex and luajittex mixed ... lub or luc file
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then -- maybe also check for size
texio.write(string.format("(load luc: %s)",lucname))
data = loadfile(lucname)
@@ -267,7 +295,7 @@ function caches.savedata(path,name,data)
local luaname, lucname = makefullname(path,name)
if luaname then
texio.write(string.format("(save: %s)",luaname))
- table.tofile(luaname,data,true,{ reduce = true })
+ table.tofile(luaname,data,true)
if lucname and type(caches.compile) == "function" then
os.remove(lucname) -- better be safe
texio.write(string.format("(save: %s)",lucname))
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
index 5ab9df7f94b..373dab5a8c7 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics-nod.lua
@@ -54,22 +54,33 @@ nodes.handlers = { }
local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
+local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" }
nodes.nodecodes = nodecodes
nodes.whatcodes = whatcodes
nodes.whatsitcodes = whatcodes
nodes.glyphcodes = glyphcodes
+nodes.disccodes = disccodes
local free_node = node.free
local remove_node = node.remove
local new_node = node.new
local traverse_id = node.traverse_id
-local math_code = nodecodes.math
-
nodes.handlers.protectglyphs = node.protect_glyphs
nodes.handlers.unprotectglyphs = node.unprotect_glyphs
+local math_code = nodecodes.math
+local end_of_math = node.end_of_math
+
+function node.end_of_math(n)
+ if n.id == math_code and n.subtype == 1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+
function nodes.remove(head, current, free_too)
local t = current
head, current = remove_node(head,current)
@@ -88,17 +99,80 @@ function nodes.delete(head,current)
return nodes.remove(head,current,true)
end
-nodes.before = node.insert_before
-nodes.after = node.insert_after
-
function nodes.pool.kern(k)
local n = new_node("kern",1)
n.kern = k
return n
end
-function nodes.endofmath(n)
- for n in traverse_id(math_code,n.next) do
- return n
- end
-end
+-- experimental
+
+local getfield = node.getfield or function(n,tag) return n[tag] end
+local setfield = node.setfield or function(n,tag,value) n[tag] = value end
+
+nodes.getfield = getfield
+nodes.setfield = setfield
+
+nodes.getattr = getfield
+nodes.setattr = setfield
+
+if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end
+if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
+if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
+if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
+if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
+if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
+if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
+
+function nodes.tonut (n) return n end
+function nodes.tonode(n) return n end
+
+-- being lazy ... just copy a bunch ... not all needed in generic but we assume
+-- nodes to be kind of private anyway
+
+nodes.tostring = node.tostring or tostring
+nodes.copy = node.copy
+nodes.copy_list = node.copy_list
+nodes.delete = node.delete
+nodes.dimensions = node.dimensions
+nodes.end_of_math = node.end_of_math
+nodes.flush_list = node.flush_list
+nodes.flush_node = node.flush_node
+nodes.free = node.free
+nodes.insert_after = node.insert_after
+nodes.insert_before = node.insert_before
+nodes.hpack = node.hpack
+nodes.new = node.new
+nodes.tail = node.tail
+nodes.traverse = node.traverse
+nodes.traverse_id = node.traverse_id
+nodes.slide = node.slide
+nodes.vpack = node.vpack
+
+nodes.first_glyph = node.first_glyph
+nodes.first_character = node.first_character
+nodes.has_glyph = node.has_glyph or node.first_glyph
+
+nodes.current_attr = node.current_attr
+nodes.do_ligature_n = node.do_ligature_n
+nodes.has_field = node.has_field
+nodes.last_node = node.last_node
+nodes.usedlist = node.usedlist
+nodes.protrusion_skippable = node.protrusion_skippable
+nodes.write = node.write
+
+nodes.has_attribute = node.has_attribute
+nodes.set_attribute = node.set_attribute
+nodes.unset_attribute = node.unset_attribute
+
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyphs = node.unprotect_glyphs
+nodes.kerning = node.kerning
+nodes.ligaturing = node.ligaturing
+nodes.mlist_to_hlist = node.mlist_to_hlist
+
+-- in generic code, at least for some time, we stay nodes, while in context
+-- we can go nuts (e.g. experimental); this split permits us to keep code
+-- used elsewhere stable but at the same time play around in context
+
+nodes.nuts = nodes
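The block above gives the generic loader the same accessor layer (getfield, getnext, getid and friends) that the ConTeXt "nuts" code expects, with nodes.nuts pointing back at nodes so plain userdata access keeps working. A small usage sketch follows (illustrative only, not part of the patch); it assumes luatex-basics-nod.lua has been loaded inside LuaTeX, and countglyphs is a made-up helper.

local nuts       = nodes.nuts        -- identical to nodes in the generic case
local getid      = nuts.getid
local getnext    = nuts.getnext
local glyph_code = nodes.nodecodes.glyph

local function countglyphs(head)
    -- walks a node list using only the accessor layer, so the same code
    -- runs whether the accessors are real functions or the plain-index
    -- fallbacks defined above
    local n       = 0
    local current = nuts.tonut(head)  -- identity in the generic variant
    while current do
        if getid(current) == glyph_code then
            n = n + 1
        end
        current = getnext(current)
    end
    return n
end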
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex
index bb34587ff6c..abe49897016 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-basics.tex
@@ -12,6 +12,8 @@
%D rather dumb attribute allocator. We start at 256 because we don't want
%D any interference with the attributes used in the font handler.
+\ifx\newattribute\undefined \else \endinput \fi
+
\newcount \lastallocatedattribute \lastallocatedattribute=255
\def\newattribute#1%
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-inj.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-inj.lua
new file mode 100644
index 00000000000..5e6c0707092
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-inj.lua
@@ -0,0 +1,523 @@
+if not modules then modules = { } end modules ['node-inj'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is very experimental (this will change when we have luatex > .50 and
+-- a few pending thingies are available. Also, Idris needs to make a few more
+-- test fonts. Btw, future versions of luatex will have extended glyph properties
+-- that can be of help. Some optimizations can go away when we have faster machines.
+
+local next = next
+local utfchar = utf.char
+
+local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("nodes","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local nodepool = nodes.pool
+local newkern = nodepool.kern
+
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local a_kernpair = attributes.private('kernpair')
+local a_ligacomp = attributes.private('ligacomp')
+local a_markbase = attributes.private('markbase')
+local a_markmark = attributes.private('markmark')
+local a_markdone = attributes.private('markdone')
+local a_cursbase = attributes.private('cursbase')
+local a_curscurs = attributes.private('curscurs')
+local a_cursdone = attributes.private('cursdone')
+
+-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
+-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
+-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
+-- that this code is not 100% okay but examples are needed to figure things out.
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local cursives = { }
+local marks = { }
+local kerns = { }
+
+-- Currently we do gpos/kern in a bit unofficial way but when we have the extra fields in
+-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
+-- can share tables.
+
+-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
+-- checking with husayni (volt and fontforge).
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
+ local ws, wn = tfmstart.width, tfmnext.width
+ local bound = #cursives + 1
+ start[a_cursbase] = bound
+ nxt[a_curscurs] = bound
+ cursives[bound] = { rlmode, dx, dy, ws, wn }
+ return dx, dy, bound
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
+ local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+ -- dy = y - h
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
+ local bound = current[a_kernpair]
+ if bound then
+ local kb = kerns[bound]
+ -- inefficient but singles have less, but weird anyway, needs checking
+ kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
+ else
+ bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
+ end
+ return x, y, w, h, bound
+ end
+ return x, y, w, h -- no bound
+end
+
+function injections.setkern(current,factor,rlmode,x,tfmchr)
+ local dx = factor*x
+ if dx ~= 0 then
+ local bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, dx }
+ return dx, bound
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ local bound = base[a_markbase]
+ local index = 1
+ if bound then
+ local mb = marks[bound]
+ if mb then
+ -- if not index then index = #mb + 1 end
+ index = #mb + 1
+ mb[index] = { dx, dy, rlmode }
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ return dx, dy, bound
+ else
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ end
+ end
+ index = index or 1
+ bound = #marks + 1
+ base[a_markbase] = bound
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ marks[bound] = { [index] = { dx, dy, rlmode } }
+ return dx, dy, bound
+end
+
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local kp = n[a_kernpair]
+ local mb = n[a_markbase]
+ local mm = n[a_markmark]
+ local md = n[a_markdone]
+ local cb = n[a_cursbase]
+ local cc = n[a_curscurs]
+ local char = n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k = kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m = marks[mm]
+ if mb then
+ local m = m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
+ else
+ m = m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
+ end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c = cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
+ end
+ end
+ report_injections("end run")
+end
+
+-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
+-- todo: check for attribute
+
+-- We can have a fast test on a font being processed, so we can check faster for marks etc
+-- but I'll make a context variant anyway.
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",current.kern)
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = current.next
+ end
+end
+
+function injections.handler(head,where,keep)
+ local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ -- in the future variant we will not copy items but refs to tables
+ local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
+ if has_kerns then -- move outside loop
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
+ local dy = y - h
+ if dy ~= 0 then
+ ky[n] = dy
+ end
+ if w ~= 0 or x ~= 0 then
+ wx[n] = kk
+ end
+ rl[n] = kk[1] -- could move in test
+ end
+ end
+ end
+ end
+ else
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid > 0 then
+ -- we can assume done == true because we have cursives and marks
+ local cx = { }
+ if has_kerns and next(ky) then
+ for n, k in next, ky do
+ n.yoffset = k
+ end
+ end
+ -- todo: reuse t and use maxt
+ if has_cursives then
+ local p_cursbase, p = nil, nil
+ -- since we need valid[n+1] we can also use a "while true do"
+ local t, d, maxt = { }, { }, 0
+ for i=1,nofvalid do -- valid == glyphs
+ local n = valid[i]
+ if not mk[n] then
+ local n_cursbase = n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs = n[a_curscurs]
+ if p_cursbase == n_curscurs then
+ local c = cursives[n_curscurs]
+ if c then
+ local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
+ if rlmode >= 0 then
+ dx = dx - ws
+ else
+ dx = dx + wn
+ end
+ if dx ~= 0 then
+ cx[n] = dx
+ rl[n] = rlmode
+ end
+ -- if rlmode and rlmode < 0 then
+ dy = -dy
+ -- end
+ maxt = maxt + 1
+ t[maxt] = p
+ d[maxt] = dy
+ else
+ maxt = 0
+ end
+ end
+ elseif maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ti.yoffset + ny
+ end
+ maxt = 0
+ end
+ if not n_cursbase and maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ p_cursbase, p = n_cursbase, n
+ end
+ end
+ if maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ if not keep then
+ cursives = { }
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p = valid[i]
+ local p_markbase = p[a_markbase]
+ if p_markbase then
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark = n[a_markmark]
+ if p_markbase == n_markmark then
+ local index = n[a_markdone] or 1
+ local d = mrks[index]
+ if d then
+ local rlmode = d[3]
+ --
+ local k = wx[p]
+ if k then
+ local x = k[2]
+ local w = k[4]
+ if w then
+ if rlmode and rlmode >= 0 then
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ n.xoffset = p.xoffset - p.width + d[1] - (w-x)
+ else
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ -- okay for husayni
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ -- needs checking: is x ok here?
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ n.xoffset = p.xoffset - d[1]
+ end
+ local w = n.width
+ if w ~= 0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
+ end
+ end
+ -- --
+ if mk[p] then
+ n.yoffset = p.yoffset + d[2]
+ else
+ n.yoffset = n.yoffset + p.yoffset + d[2]
+ end
+ --
+ if nofmarks == 1 then
+ break
+ else
+ nofmarks = nofmarks - 1
+ end
+ end
+ else
+ -- KE: there can be sequences in ligatures
+ end
+ end
+ end
+ end
+ if not keep then
+ marks = { }
+ end
+ end
+ -- todo : combine
+ if next(wx) then
+ for n, k in next, wx do
+ -- only w can be nil (kernclasses), can be sped up when w == nil
+ local x = k[2]
+ local w = k[4]
+ if w then
+ local rl = k[1] -- r2l = k[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx)) -- type 0/2
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x)) -- type 0/2
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x)) -- type 0/2
+ end
+ if wx ~= 0 then
+ insert_node_after (head,n,newkern(wx)) -- type 0/2
+ end
+ end
+ elseif x ~= 0 then
+ -- this needs checking for rl < 0 but it is unlikely that a r2l script
+ -- uses kernclasses between glyphs so we're probably safe (KE has a
+ -- problematic font where marks interfere with rl < 0 in the previous
+ -- case)
+ insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
+ end
+ end
+ end
+ if next(cx) then
+ for n, k in next, cx do
+ if k ~= 0 then
+ local rln = rl[n]
+ if rln and rln < 0 then
+ insert_node_before(head,n,newkern(-k)) -- type 0/2
+ else
+ insert_node_before(head,n,newkern(k)) -- type 0/2
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ elseif not keep then
+ kerns, cursives, marks = { }, { }, { }
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
+ if y and y ~= 0 then
+ n.yoffset = y -- todo: h ?
+ end
+ if w then
+ -- copied from above
+ -- local r2l = kk[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ -- simple (e.g. kernclass kerns)
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ else
+ -- no tracing needed
+ end
+ return head, false
+end
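The new injection file above works in two passes: the feature handlers only record positioning decisions in side tables (kerns, cursives, marks) and tag the glyph nodes with a private attribute holding an index into those tables, and injections.handler later walks the glyph list once and turns the recorded values into kern nodes and offsets. The following plain-Lua model shows just that deferred bookkeeping for the simple kern case; it is illustrative only, with plain tables standing in for TeX nodes and setkern/handler as simplified stand-ins for the real functions.

local kerns      = { }
local a_kernpair = "kernpair"            -- stands in for the private attribute

local function setkern(glyph,dx)         -- pass one: bookkeeping only
    if dx ~= 0 then
        local bound = #kerns + 1
        glyph[a_kernpair] = bound
        kerns[bound] = dx
    end
end

local function handler(list)             -- pass two: materialize the kerns
    local result = { }
    for i=1,#list do
        local glyph = list[i]
        local bound = glyph[a_kernpair]
        if bound then
            -- a kern precedes its glyph, as in the simple path of the real handler
            result[#result+1] = { kern = kerns[bound] }
        end
        result[#result+1] = glyph
    end
    kerns = { }                          -- reset, as the real code does when not keeping
    return result
end

local list = { { char = 65 }, { char = 66 } }
setkern(list[1],1000)
local positioned = handler(list)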
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
index cf5862ca9f4..dd98686267a 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 05/28/13 00:34:00
+-- merge date : 04/28/14 23:24:10
do -- begin closure to overcome local limits and interference
@@ -82,6 +82,9 @@ function optionalrequire(...)
return result
end
end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
end -- closure
@@ -95,12 +98,15 @@ if not modules then modules={} end modules ['l-lpeg']={
license="see context related readme files"
}
lpeg=require("lpeg")
+if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
local type,next,tostring=type,next,tostring
local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -110,28 +116,46 @@ patterns.anything=anything
patterns.endofstring=endofstring
patterns.beginofstring=alwaysmatched
patterns.alwaysmatched=alwaysmatched
-local digit,sign=R('09'),S('+-')
+local sign=S('+-')
+local zero=P('0')
+local digit=R('09')
+local octdigit=R("07")
+local lowercase=R("az")
+local uppercase=R("AZ")
+local underscore=P("_")
+local hexdigit=digit+lowercase+uppercase
local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
local newline=crlf+S("\r\n")
local escaped=P("\\")*anything
local squote=P("'")
local dquote=P('"')
local space=P(" ")
-local utfbom_32_be=P('\000\000\254\255')
-local utfbom_32_le=P('\255\254\000\000')
-local utfbom_16_be=P('\255\254')
-local utfbom_16_le=P('\254\255')
-local utfbom_8=P('\239\187\191')
+local period=P(".")
+local comma=P(",")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\254\255')
+local utfbom_16_le=P('\255\254')
+local utfbom_8=P('\239\187\191')
local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")
local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
local utf8next=R("\128\191")
+patterns.utfbom_32_be=utfbom_32_be
+patterns.utfbom_32_le=utfbom_32_le
+patterns.utfbom_16_be=utfbom_16_be
+patterns.utfbom_16_le=utfbom_16_le
+patterns.utfbom_8=utfbom_8
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
patterns.utf8one=R("\000\127")
patterns.utf8two=R("\194\223")*utf8next
patterns.utf8three=R("\224\239")*utf8next*utf8next
patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
patterns.utfbom=utfbom
patterns.utftype=utftype
+patterns.utfstricttype=utfstricttype
patterns.utfoffset=utfoffset
local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
@@ -151,27 +175,14 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
patterns.stripper=stripper
+patterns.fullstripper=fullstripper
patterns.collapser=collapser
-patterns.digit=digit
-patterns.sign=sign
-patterns.cardinal=sign^0*digit^1
-patterns.integer=sign^0*digit^1
-patterns.unsigned=digit^0*P('.')*digit^1
-patterns.float=sign^0*patterns.unsigned
-patterns.cunsigned=digit^0*P(',')*digit^1
-patterns.cfloat=sign^0*patterns.cunsigned
-patterns.number=patterns.float+patterns.integer
-patterns.cnumber=patterns.cfloat+patterns.integer
-patterns.oct=P("0")*R("07")^1
-patterns.octal=patterns.oct
-patterns.HEX=P("0x")*R("09","AF")^1
-patterns.hex=P("0x")*R("09","af")^1
-patterns.hexadecimal=P("0x")*R("09","AF","af")^1
-patterns.lowercase=R("az")
-patterns.uppercase=R("AZ")
+patterns.lowercase=lowercase
+patterns.uppercase=uppercase
patterns.letter=patterns.lowercase+patterns.uppercase
patterns.space=space
patterns.tab=P("\t")
@@ -179,12 +190,12 @@ patterns.spaceortab=patterns.space+patterns.tab
patterns.newline=newline
patterns.emptyline=newline^1
patterns.equal=P("=")
-patterns.comma=P(",")
-patterns.commaspacer=P(",")*spacer^0
-patterns.period=P(".")
+patterns.comma=comma
+patterns.commaspacer=comma*spacer^0
+patterns.period=period
patterns.colon=P(":")
patterns.semicolon=P(";")
-patterns.underscore=P("_")
+patterns.underscore=underscore
patterns.escaped=escaped
patterns.squote=squote
patterns.dquote=dquote
@@ -197,10 +208,29 @@ patterns.unspacer=((patterns.spacer^1)/"")^0
patterns.singlequoted=squote*patterns.nosquote*squote
patterns.doublequoted=dquote*patterns.nodquote*dquote
patterns.quoted=patterns.doublequoted+patterns.singlequoted
-patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.digit=digit
+patterns.octdigit=octdigit
+patterns.hexdigit=hexdigit
+patterns.sign=sign
+patterns.cardinal=digit^1
+patterns.integer=sign^-1*digit^1
+patterns.unsigned=digit^0*period*digit^1
+patterns.float=sign^-1*patterns.unsigned
+patterns.cunsigned=digit^0*comma*digit^1
+patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=zero*octdigit^1
+patterns.octal=patterns.oct
+patterns.HEX=zero*P("X")*(digit+uppercase)^1
+patterns.hex=zero*P("x")*(digit+lowercase)^1
+patterns.hexadecimal=zero*S("xX")*hexdigit^1
+patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1
+patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1
+patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring
patterns.somecontent=(anything-newline-space)^1
patterns.beginline=#(1-newline)
-patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0))
local function anywhere(pattern)
return P { P(pattern)+1*V(1) }
end
@@ -372,7 +402,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction)
+function lpeg.finder(lst,makefunction,isutf)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -388,7 +418,11 @@ function lpeg.finder(lst,makefunction)
else
pattern=P(lst)
end
- pattern=(1-pattern)^0*pattern
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -401,8 +435,8 @@ local splitters_f,splitters_s={},{}
function lpeg.firstofsplit(separator)
local splitter=splitters_f[separator]
if not splitter then
- separator=P(separator)
- splitter=C((1-separator)^0)
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)
splitters_f[separator]=splitter
end
return splitter
@@ -410,12 +444,31 @@ end
function lpeg.secondofsplit(separator)
local splitter=splitters_s[separator]
if not splitter then
- separator=P(separator)
- splitter=(1-separator)^0*separator*C(anything^0)
+ local pattern=P(separator)
+ splitter=(1-pattern)^0*pattern*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+local splitters_s,splitters_p={},{}
+function lpeg.beforesuffix(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)*pattern*endofstring
splitters_s[separator]=splitter
end
return splitter
end
+function lpeg.afterprefix(separator)
+ local splitter=splitters_p[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=pattern*C(anything^0)
+ splitters_p[separator]=splitter
+ end
+ return splitter
+end
function lpeg.balancer(left,right)
left,right=P(left),P(right)
return P { left*((1-left-right)+V(1))^0*right }
@@ -647,9 +700,6 @@ end
function lpeg.times(pattern,n)
return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
end
-local digit=R("09")
-local period=P(".")
-local zero=P("0")
local trailingzeros=zero^0*-digit
local case_1=period*trailingzeros/""
local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
@@ -709,11 +759,15 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -847,6 +901,36 @@ local function sortedkeys(tab)
return {}
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return {}
+ end
+end
local function sortedhashkeys(tab,cmp)
if tab then
local srt,s={},0
@@ -872,6 +956,8 @@ function table.allkeys(t)
return sortedkeys(keys)
end
table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
table.sortedhashkeys=sortedhashkeys
local function nothing() end
local function sortedhash(t,cmp)
@@ -883,10 +969,13 @@ local function sortedhash(t,cmp)
s=sortedkeys(t)
end
local n=0
+ local m=#s
local function kv(s)
- n=n+1
- local k=s[n]
- return k,t[k]
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
 end
 return kv,t
 else
@@ -1056,12 +1146,12 @@ local function simple_table(t)
else
tt[nt]=tostring(v)
end
- elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=tostring(v)
elseif tv=="string" then
nt=nt+1
tt[nt]=format("%q",v)
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=v and "true" or "false"
else
tt=nil
break
@@ -1094,7 +1184,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s[%q]={",depth,name))
end
elseif tn=="boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
+ handle(format("%s[%s]={",depth,name and "true" or "false"))
else
handle(format("%s{",depth))
end
@@ -1118,21 +1208,21 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k=sk[i]
local v=root[k]
- local t,tk=type(v),type(k)
+ local tv,tk=type(v),type(k)
if compact and first and tk=="number" and k>=first and k<=last then
- if t=="number" then
+ if tv=="number" then
if hexify then
handle(format("%s 0x%04X,",depth,v))
else
handle(format("%s %s,",depth,v))
end
- elseif t=="string" then
+ elseif tv=="string" then
if reduce and tonumber(v) then
handle(format("%s %s,",depth,v))
else
handle(format("%s %q,",depth,v))
end
- elseif t=="table" then
+ elseif tv=="table" then
if not next(v) then
handle(format("%s {},",depth))
elseif inline then
@@ -1145,11 +1235,11 @@ local function do_serialize(root,name,depth,level,indexed)
else
do_serialize(v,k,depth,level+1,true)
end
- elseif t=="boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t=="function" then
+ elseif tv=="boolean" then
+ handle(format("%s %s,",depth,v and "true" or "false"))
+ elseif tv=="function" then
if functions then
- handle(format('%s load(%q),',depth,dump(v)))
+ handle(format('%s load(%q),',depth,dump(v)))
else
handle(format('%s "function",',depth))
end
@@ -1160,7 +1250,7 @@ local function do_serialize(root,name,depth,level,indexed)
if false then
handle(format("%s __p__=nil,",depth))
end
- elseif t=="number" then
+ elseif tv=="number" then
if tk=="number" then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
@@ -1169,9 +1259,9 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif tk=="boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
else
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
@@ -1186,7 +1276,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%s,",depth,k,v))
end
end
- elseif t=="string" then
+ elseif tv=="string" then
if reduce and tonumber(v) then
if tk=="number" then
if hexify then
@@ -1195,7 +1285,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%s,",depth,k,v))
else
@@ -1209,14 +1299,14 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%q,",depth,k,v))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,v))
else
handle(format("%s [%q]=%q,",depth,k,v))
end
end
- elseif t=="table" then
+ elseif tv=="table" then
if not next(v) then
if tk=="number" then
if hexify then
@@ -1225,7 +1315,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]={},",depth,k))
end
elseif tk=="boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
+ handle(format("%s [%s]={},",depth,k and "true" or "false"))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={},",depth,k))
else
@@ -1241,7 +1331,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
elseif tk=="boolean" then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
else
@@ -1253,21 +1343,21 @@ local function do_serialize(root,name,depth,level,indexed)
else
do_serialize(v,k,depth,level+1)
end
- elseif t=="boolean" then
+ elseif tv=="boolean" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
+ handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
end
- elseif t=="function" then
+ elseif tv=="function" then
if functions then
local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
if tk=="number" then
@@ -1277,7 +1367,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=load(%q),",depth,k,f))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=load(%q),",depth,k,f))
else
@@ -1292,7 +1382,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
handle(format("%s %s=%q,",depth,k,tostring(v)))
else
@@ -1593,7 +1683,9 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
@@ -1628,6 +1720,24 @@ function table.sorted(t,...)
sort(t,...)
return t
end
+function table.values(t,s)
+ if t then
+ local values,keys,v={},{},0
+ for key,value in next,t do
+ if not keys[value] then
+ v=v+1
+ values[v]=value
+ keys[value]=key
+ end
+ end
+ if s then
+ sort(values)
+ end
+ return values
+ else
+ return {}
+ end
+end
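+ -- Usage sketch for table.values: it collects the distinct values of a hash,
+ -- sorted when the second argument is truthy; the table and the result shown
+ -- are illustrative, not taken from a test run.
+ --   local colors = { a = "red", b = "blue", c = "red" }
+ --   local v = table.values(colors,true) -- expected { "blue", "red" }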
end -- closure
@@ -1645,7 +1755,7 @@ local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
local concat=table.concat
local floor=math.floor
local type=type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator="\\",";"
else
io.fileseparator,io.pathseparator="/",":"
@@ -1662,6 +1772,7 @@ local function readall(f)
return f:read('*all')
else
local done=f:seek("set",0)
+ local step
if size<1024*1024 then
step=1024*1024
elseif size>16*1024*1024 then
@@ -2185,17 +2296,24 @@ end
function file.joinpath(tab,separator)
return tab and concat(tab,separator or io.pathseparator)
end
+local someslash=S("\\/")
local stripper=Cs(P(fwslash)^0/""*reslasher)
-local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon
local isroot=fwslash^1*-1
local hasroot=fwslash^1
+local reslasher=lpeg.replacer(S("\\/"),"/")
local deslasher=lpeg.replacer(S("\\/")^1,"/")
function file.join(...)
local lst={... }
local one=lst[1]
if lpegmatch(isnetwork,one) then
+ local one=lpegmatch(reslasher,one)
local two=lpegmatch(deslasher,concat(lst,"/",2))
- return one.."/"..two
+ if lpegmatch(hasroot,two) then
+ return one..two
+ else
+ return one.."/"..two
+ end
elseif lpegmatch(isroot,one) then
local two=lpegmatch(deslasher,concat(lst,"/",2))
if lpegmatch(hasroot,two) then
@@ -2212,7 +2330,9 @@ end
local drivespec=R("az","AZ")^1*colon
local anchors=fwslash+drivespec
local untouched=periods+(1-period)^1*P(-1)
-local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0)
+local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//")
+local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
local absolute=fwslash
function file.collapsepath(str,anchor)
if not str then
@@ -2375,9 +2495,9 @@ function string.booleanstring(str)
end
function string.is_boolean(str,default)
if type(str)=="string" then
- if str=="true" or str=="yes" or str=="on" or str=="t" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
return true
- elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
return false
end
end
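-- Usage sketch for the extended string.is_boolean: the numeric strings "1"
-- and "0" are now recognized as well; anything unrecognized presumably falls
-- back to the optional default argument.
--   string.is_boolean("1") -- expected: true
--   string.is_boolean("0") -- expected: false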
@@ -2437,15 +2557,28 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=_LUAVERSION<5.2 and load or function(str)
- return load(dump(load(str),true))
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
end
if not number then number={} end
local stripper=patterns.stripzeros
local function points(n)
+ n=tonumber(n)
return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n=tonumber(n)
return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
end
number.points=points
@@ -2508,11 +2641,39 @@ local pattern=Carg(1)/function(t)
function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
-function strings.striplong(str)
- str=gsub(str,"^%s*","")
- str=gsub(str,"[\n\r]+ *","\n")
- return str
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+}
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str
end
+strings.striplong=strings.striplines
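+ -- Usage sketch for strings.striplines: the optional second argument selects
+ -- one of the keys of strings.striplinepatterns, the default being the
+ -- "prune and collapse" variant; input and result here are illustrative.
+ --   strings.striplines("  one \n\n\n two ","prune and no empty")
+ --   -- expected: "one\ntwo"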
function strings.nice(str)
str=gsub(str,"[:%-+_]+"," ")
return str
@@ -2563,29 +2724,83 @@ function number.signed(i)
return "-",-i
end
end
-local preamble=[[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-]]
+local zero=P("0")^1/""
+local plus=P("+")/""
+local minus=P("-")
+local separator=S(".")
+local digit=R("09")
+local trailing=zero^1*#S("eE")
+local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1))
+local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent)
+local pattern_b=Cs((exponent+P(1))^0)
+function number.sparseexponent(f,n)
+ if not n then
+ n=f
+ f="%e"
+ end
+ local tn=type(n)
+ if tn=="string" then
+ local m=tonumber(n)
+ if m then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m))
+ end
+ elseif tn=="number" then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n))
+ end
+ return tostring(n)
+end
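+ -- Usage sketch for number.sparseexponent: redundant plus signs and zeros in
+ -- the exponent (and trailing zeros in the mantissa) are pruned; the results
+ -- shown are what the definitions suggest, not output from a test run.
+ --   number.sparseexponent("%.3e",10000) -- expected "1e4" (plain "%e" gives "1.000e+04")
+ --   number.sparseexponent("%.3e",12300) -- expected "1.23e4"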
local template=[[
%s
%s
return function(%s) return %s end
]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -2594,7 +2809,7 @@ setmetatable(arguments,{ __index=function(t,k)
end
})
local prefix_any=C((S("+- .")+R("09"))^0)
-local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0)
local format_s=function(f)
n=n+1
if f and f~="" then
@@ -2624,7 +2839,7 @@ local format_i=function(f)
if f and f~="" then
return format("format('%%%si',a%s)",f,n)
else
- return format("a%s",n)
+ return format("format('%%i',a%s)",n)
end
end
local format_d=format_i
@@ -2636,6 +2851,10 @@ local format_f=function(f)
n=n+1
return format("format('%%%sf',a%s)",f,n)
end
+local format_F=function(f)
+ n=n+1
+ return format("((a%s == 0 and '0') or (a%s == 1 and '1') or format('%%%sf',a%s))",n,n,f,n)
+end
local format_g=function(f)
n=n+1
return format("format('%%%sg',a%s)",f,n)
@@ -2652,6 +2871,14 @@ local format_E=function(f)
n=n+1
return format("format('%%%sE',a%s)",f,n)
end
+local format_j=function(f)
+ n=n+1
+ return format("sparseexponent('%%%se',a%s)",f,n)
+end
+local format_J=function(f)
+ n=n+1
+ return format("sparseexponent('%%%sE',a%s)",f,n)
+end
local format_x=function(f)
n=n+1
return format("format('%%%sx',a%s)",f,n)
@@ -2776,6 +3003,43 @@ end
local format_W=function(f)
return format("nspaces[%s]",tonumber(f) or 0)
end
+local digit=patterns.digit
+local period=patterns.period
+local three=digit*digit*digit
+local splitter=Cs (
+ (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2)
+)
+patterns.formattednumber=splitter
+function number.formatted(n,sep1,sep2)
+ local s=type(n)=="string" and n or format("%0.2f",n)
+ if sep1==true then
+ return lpegmatch(splitter,s,1,".",",")
+ elseif sep1=="." then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ",")
+ elseif sep1=="," then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ".")
+ else
+ return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
+ end
+end
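+ -- Usage sketch for number.formatted: a number gets two decimals and thousand
+ -- separators; passing true as the second argument swaps the period/comma
+ -- convention. Expected results, illustrative only:
+ --   number.formatted(1234567.89)      -- "1,234,567.89"
+ --   number.formatted(1234567.89,true) -- "1.234.567,89"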
+local format_m=function(f)
+ n=n+1
+ if not f or f=="" then
+ f=","
+ end
+ return format([[formattednumber(a%s,%q,".")]],n,f)
+end
+local format_M=function(f)
+ n=n+1
+ if not f or f=="" then
+ f="."
+ end
+ return format([[formattednumber(a%s,%q,",")]],n,f)
+end
+local format_z=function(f)
+ n=n+(tonumber(f) or 1)
+ return "''"
+end
local format_rest=function(s)
return format("%q",s)
end
@@ -2805,15 +3069,17 @@ local builder=Cs { "start",
(
P("%")/""*(
V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
+V("c")+V("C")+V("S")
+V("Q")
+V("N")
-+V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
-+V("w")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w")
+V("W")
+V("a")
-+V("A")
++V("A")
++V("j")+V("J")
++V("m")+V("M")
++V("z")
+V("*")
)+V("*")
)*(P(-1)+Carg(1))
@@ -2823,6 +3089,7 @@ local builder=Cs { "start",
["i"]=(prefix_any*P("i"))/format_i,
["d"]=(prefix_any*P("d"))/format_d,
["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
["g"]=(prefix_any*P("g"))/format_g,
["G"]=(prefix_any*P("G"))/format_G,
["e"]=(prefix_any*P("e"))/format_e,
@@ -2844,19 +3111,24 @@ local builder=Cs { "start",
["b"]=(prefix_any*P("b"))/format_b,
["t"]=(prefix_tab*P("t"))/format_t,
["T"]=(prefix_tab*P("T"))/format_T,
- ["l"]=(prefix_tab*P("l"))/format_l,
- ["L"]=(prefix_tab*P("L"))/format_L,
+ ["l"]=(prefix_any*P("l"))/format_l,
+ ["L"]=(prefix_any*P("L"))/format_L,
["I"]=(prefix_any*P("I"))/format_I,
["w"]=(prefix_any*P("w"))/format_w,
["W"]=(prefix_any*P("W"))/format_W,
+ ["j"]=(prefix_any*P("j"))/format_j,
+ ["J"]=(prefix_any*P("J"))/format_J,
+ ["m"]=(prefix_tab*P("m"))/format_m,
+ ["M"]=(prefix_tab*P("M"))/format_M,
+ ["z"]=(prefix_any*P("z"))/format_z,
["a"]=(prefix_any*P("a"))/format_a,
["A"]=(prefix_any*P("A"))/format_A,
- ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
- )
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
@@ -2865,10 +3137,10 @@ local function make(t,str)
f=loadstripped(p)()
else
n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p)()
+ f=loadstripped(p,t._environment_)()
else
f=function() return str end
end
@@ -2880,10 +3152,22 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-function strings.formatters.new()
- local t={ _extensions_={},_preamble_="",_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
end
local formatters=strings.formatters.new()
string.formatters=formatters
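-- Sketch of the directives added above, used through the formatters table;
-- the results are what one would expect from the definitions, not verified:
--   local f = string.formatters
--   f["%m"](1234567)  -- thousands separators, e.g. "1,234,567.00"
--   f["%0.3j"](10000) -- sparse exponent, e.g. "1e4"
--   f["%z%s"](1,"x")  -- %z swallows one argument, leaving just "x"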
@@ -2891,16 +3175,29 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if preamble then
+ if type(preamble)=="string" then
t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
end
end
end
strings.formatters.add=add
-lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
-lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
+patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
end -- closure
@@ -2979,6 +3276,7 @@ local remapper={
fea="font feature files",
pfa="type1 fonts",
pfb="type1 fonts",
+ afm="afm",
}
function resolvers.findfile(name,fileformat)
name=string.gsub(name,"\\","/")
@@ -2997,6 +3295,10 @@ function resolvers.findfile(name,fileformat)
return found
end
resolvers.findbinfile=resolvers.findfile
+function resolvers.loadbinfile(filename,filetype)
+ local data=io.loaddata(filename)
+ return true,data,#data
+end
function resolvers.resolve(s)
return s
end
@@ -3012,12 +3314,21 @@ if not caches.namespace or caches.namespace=="" or caches.namespace=="context" t
end
do
local cachepaths=kpse.expand_var('$TEXMFCACHE') or ""
- if cachepaths=="" then
+ if cachepaths=="" or cachepaths=="$TEXMFCACHE" then
cachepaths=kpse.expand_var('$TEXMFVAR') or ""
end
- if cachepaths=="" then
+ if cachepaths=="" or cachepaths=="$TEXMFVAR" then
cachepaths=kpse.expand_var('$VARTEXMF') or ""
end
+ if cachepaths=="" then
+ local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+ for i=1,#fallbacks do
+ cachepaths=os.getenv(fallbacks[i]) or ""
+ if cachepaths~="" and lfs.isdir(cachepaths) then
+ break
+ end
+ end
+ end
if cachepaths=="" then
cachepaths="."
end
@@ -3083,6 +3394,17 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data=false
local luaname,lucname=makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then
texio.write(string.format("(load luc: %s)",lucname))
data=loadfile(lucname)
@@ -3111,7 +3433,7 @@ function caches.savedata(path,name,data)
local luaname,lucname=makefullname(path,name)
if luaname then
texio.write(string.format("(save: %s)",luaname))
- table.tofile(luaname,data,true,{ reduce=true })
+ table.tofile(luaname,data,true)
if lucname and type(caches.compile)=="function" then
os.remove(lucname)
texio.write(string.format("(save: %s)",lucname))
@@ -3296,17 +3618,27 @@ nodes.handlers={}
local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" }
nodes.nodecodes=nodecodes
nodes.whatcodes=whatcodes
nodes.whatsitcodes=whatcodes
nodes.glyphcodes=glyphcodes
+nodes.disccodes=disccodes
local free_node=node.free
local remove_node=node.remove
local new_node=node.new
local traverse_id=node.traverse_id
-local math_code=nodecodes.math
nodes.handlers.protectglyphs=node.protect_glyphs
nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+local math_code=nodecodes.math
+local end_of_math=node.end_of_math
+function node.end_of_math(n)
+ if n.id==math_code and n.subtype==1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
function nodes.remove(head,current,free_too)
local t=current
head,current=remove_node(head,current)
@@ -3323,18 +3655,63 @@ end
function nodes.delete(head,current)
return nodes.remove(head,current,true)
end
-nodes.before=node.insert_before
-nodes.after=node.insert_after
function nodes.pool.kern(k)
local n=new_node("kern",1)
n.kern=k
return n
end
-function nodes.endofmath(n)
- for n in traverse_id(math_code,n.next) do
- return n
- end
-end
+local getfield=node.getfield or function(n,tag) return n[tag] end
+local setfield=node.setfield or function(n,tag,value) n[tag]=value end
+nodes.getfield=getfield
+nodes.setfield=setfield
+nodes.getattr=getfield
+nodes.setattr=setfield
+if node.getid then nodes.getid=node.getid else function nodes.getid (n) return getfield(n,"id") end end
+if node.getsubtype then nodes.getsubtype=node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
+if node.getnext then nodes.getnext=node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
+if node.getprev then nodes.getprev=node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
+if node.getchar then nodes.getchar=node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
+if node.getfont then nodes.getfont=node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
+if node.getlist then nodes.getlist=node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
+function nodes.tonut (n) return n end
+function nodes.tonode(n) return n end
+nodes.tostring=node.tostring or tostring
+nodes.copy=node.copy
+nodes.copy_list=node.copy_list
+nodes.delete=node.delete
+nodes.dimensions=node.dimensions
+nodes.end_of_math=node.end_of_math
+nodes.flush_list=node.flush_list
+nodes.flush_node=node.flush_node
+nodes.free=node.free
+nodes.insert_after=node.insert_after
+nodes.insert_before=node.insert_before
+nodes.hpack=node.hpack
+nodes.new=node.new
+nodes.tail=node.tail
+nodes.traverse=node.traverse
+nodes.traverse_id=node.traverse_id
+nodes.slide=node.slide
+nodes.vpack=node.vpack
+nodes.first_glyph=node.first_glyph
+nodes.first_character=node.first_character
+nodes.has_glyph=node.has_glyph or node.first_glyph
+nodes.current_attr=node.current_attr
+nodes.do_ligature_n=node.do_ligature_n
+nodes.has_field=node.has_field
+nodes.last_node=node.last_node
+nodes.usedlist=node.usedlist
+nodes.protrusion_skippable=node.protrusion_skippable
+nodes.write=node.write
+nodes.has_attribute=node.has_attribute
+nodes.set_attribute=node.set_attribute
+nodes.unset_attribute=node.unset_attribute
+nodes.protect_glyphs=node.protect_glyphs
+nodes.unprotect_glyphs=node.unprotect_glyphs
+nodes.kerning=node.kerning
+nodes.ligaturing=node.ligaturing
+nodes.mlist_to_hlist=node.mlist_to_hlist
+nodes.nuts=nodes
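+ -- In this generic variant the nuts layer is just an alias for nodes:
+ -- tonut/tonode are identity functions and getfield/setfield fall back to
+ -- plain indexing when direct accessors are absent. Illustrative sketch,
+ -- with 'n' standing for any glyph node:
+ --   local nuts = nodes.nuts
+ --   local char = nuts.getfield(nuts.tonut(n),"char") -- same as n.char here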
end -- closure
@@ -3551,6 +3928,34 @@ function constructors.beforecopyingcharacters(target,original)
end
function constructors.aftercopyingcharacters(target,original)
end
+constructors.sharefonts=false
+constructors.nofsharedfonts=0
+local sharednames={}
+function constructors.trytosharefont(target,tfmdata)
+ if constructors.sharefonts then
+ local characters=target.characters
+ local n=1
+ local t={ target.psname }
+ local u=sortedkeys(characters)
+ for i=1,#u do
+ local k=u[i]
+ n=n+1;t[n]=k
+ n=n+1;t[n]=characters[k].index or k
+ end
+ local h=md5.HEX(concat(t," "))
+ local s=sharednames[h]
+ if s then
+ if trace_defining then
+ report_defining("font %a uses backend resources of font %a",target.fullname,s)
+ end
+ target.fullname=s
+ constructors.nofsharedfonts=constructors.nofsharedfonts+1
+ target.properties.sharedwith=s
+ else
+ sharednames[h]=target.fullname
+ end
+ end
+end
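+ -- Sharing is off by default; the sketch below shows how it could be switched
+ -- on, after which fonts whose psname plus character/index list hash to the
+ -- same md5 value reuse one fullname and thus one set of backend resources:
+ --   fonts.constructors.sharefonts = true -- illustrative, not enabled here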
function constructors.enhanceparameters(parameters)
local xheight=parameters.x_height
local quad=parameters.quad
@@ -3578,6 +3983,7 @@ function constructors.scale(tfmdata,specification)
if tonumber(specification) then
specification={ size=specification }
end
+ target.specification=specification
local scaledpoints=specification.size
local relativeid=specification.relativeid
local properties=tfmdata.properties or {}
@@ -3629,7 +4035,7 @@ function constructors.scale(tfmdata,specification)
targetproperties.script=properties.script or "dflt"
targetproperties.mode=properties.mode or "base"
local askedscaledpoints=scaledpoints
- local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints)
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification)
local hdelta=delta
local vdelta=delta
target.designsize=parameters.designsize
@@ -3703,7 +4109,7 @@ function constructors.scale(tfmdata,specification)
end
target.type=isvirtual and "virtual" or "real"
target.postprocessors=tfmdata.postprocessors
- local targetslant=(parameters.slant or parameters[1] or 0)
+ local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt
local targetspace=(parameters.space or parameters[2] or 0)*hdelta
local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
@@ -3982,6 +4388,7 @@ function constructors.scale(tfmdata,specification)
targetcharacters[unicode]=chr
end
constructors.aftercopyingcharacters(target,tfmdata)
+ constructors.trytosharefont(target,tfmdata)
return target
end
function constructors.finalize(tfmdata)
@@ -4021,7 +4428,7 @@ function constructors.finalize(tfmdata)
parameters.slantfactor=tfmdata.slant or 0
end
if not parameters.designsize then
- parameters.designsize=tfmdata.designsize or 655360
+ parameters.designsize=tfmdata.designsize or (factors.pt*10)
end
if not parameters.units then
parameters.units=tfmdata.units_per_em or 1000
@@ -4145,11 +4552,11 @@ function constructors.hashinstance(specification,force)
size=math.round(constructors.scaled(size,designsizes[hash]))
specification.size=size
end
- if fallbacks then
- return hash..' @ '..tostring(size)..' @ '..fallbacks
- else
- return hash..' @ '..tostring(size)
- end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
+ end
end
function constructors.setname(tfmdata,specification)
if constructors.namemode=="specification" then
@@ -4383,7 +4790,8 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report)
local whathandler=handlers[what]
local whatfeatures=whathandler.features
local whatprocessors=whatfeatures.processors
- local processors=whatprocessors[properties.mode]
+ local mode=properties.mode
+ local processors=whatprocessors[mode]
if processors then
for i=1,#processors do
local step=processors[i]
@@ -4400,7 +4808,7 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report)
end
end
elseif trace then
- report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname)
+ report("no feature processors for mode %a for font %a",mode,properties.fullname)
end
end
return processes
@@ -4411,7 +4819,8 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
local whathandler=handlers[what]
local whatfeatures=whathandler.features
local whatmanipulators=whatfeatures.manipulators
- local manipulators=whatmanipulators[properties.mode]
+ local mode=properties.mode
+ local manipulators=whatmanipulators[mode]
if manipulators then
for i=1,#manipulators do
local step=manipulators[i]
@@ -4420,7 +4829,7 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
if value then
local action=step.action
if trace then
- report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname)
end
if action then
action(tfmdata,feature,value)
@@ -4780,33 +5189,38 @@ function mappings.addtounicode(data,filename)
if not unicode or unicode=="" then
local split=lpegmatch(namesplitter,name)
local nsplit=split and #split or 0
- if nsplit>=2 then
- local t,n={},0
- for l=1,nsplit do
- local base=split[l]
- local u=unicodes[base] or unicodevector[base]
- if not u then
+ local t,n={},0
+ unicode=true
+ for l=1,nsplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ if u[1]>=private then
+ unicode=false
break
- elseif type(u)=="table" then
- n=n+1
- t[n]=u[1]
- else
- n=n+1
- t[n]=u
end
- end
- if n==0 then
- elseif n==1 then
- originals[index]=t[1]
- tounicode[index]=tounicode16(t[1],name)
+ n=n+1
+ t[n]=u[1]
else
- originals[index]=t
- tounicode[index]=tounicode16sequence(t)
+ if u>=private then
+ unicode=false
+ break
+ end
+ n=n+1
+ t[n]=u
end
- nl=nl+1
- unicode=true
+ end
+ if n==0 then
+ elseif n==1 then
+ originals[index]=t[1]
+ tounicode[index]=tounicode16(t[1],name)
else
+ originals[index]=t
+ tounicode[index]=tounicode16sequence(t)
end
+ nl=nl+1
end
if not unicode or unicode=="" then
local foundcodes,multiple=lpegmatch(uparser,name)
@@ -4917,27 +5331,123 @@ fonts.names.resolvespec=fonts.names.resolve
function fonts.names.getfilename(askedname,suffix)
return ""
end
+function fonts.names.ignoredfile(filename)
+ return false
+end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['luatex-fonts-tfm']={
+if not modules then modules={} end modules ['font-tfm']={
version=1.001,
- comment="companion to luatex-*.tex",
+ comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
+local next=next
+local match=string.match
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end)
+local report_defining=logs.reporter("fonts","defining")
+local report_tfm=logs.reporter("fonts","tfm loading")
+local findbinfile=resolvers.findbinfile
local fonts=fonts
-local tfm={}
-fonts.handlers.tfm=tfm
-fonts.formats.tfm="type1"
-function fonts.readers.tfm(specification)
+local handlers=fonts.handlers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local encodings=fonts.encodings
+local tfm=constructors.newhandler("tfm")
+local tfmfeatures=constructors.newfeatures("tfm")
+local registertfmfeature=tfmfeatures.register
+constructors.resolvevirtualtoo=false
+fonts.formats.tfm="type1"
+function tfm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
+ if okay then
+ return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
+ else
+ return {}
+ end
+end
+local function read_from_tfm(specification)
+ local filename=specification.filename
+ local size=specification.size
+ if trace_defining then
+ report_defining("loading tfm file %a at size %s",filename,size)
+ end
+ local tfmdata=font.read_tfm(filename,size)
+ if tfmdata then
+ local features=specification.features and specification.features.normal or {}
+ local resources=tfmdata.resources or {}
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ local shared=tfmdata.shared or {}
+ properties.name=tfmdata.name
+ properties.fontname=tfmdata.fontname
+ properties.psname=tfmdata.psname
+ properties.filename=specification.filename
+ parameters.size=size
+ shared.rawdata={}
+ shared.features=features
+ shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
+ tfmdata.properties=properties
+ tfmdata.resources=resources
+ tfmdata.parameters=parameters
+ tfmdata.shared=shared
+ parameters.slant=parameters.slant or parameters[1] or 0
+ parameters.space=parameters.space or parameters[2] or 0
+ parameters.space_stretch=parameters.space_stretch or parameters[3] or 0
+ parameters.space_shrink=parameters.space_shrink or parameters[4] or 0
+ parameters.x_height=parameters.x_height or parameters[5] or 0
+ parameters.quad=parameters.quad or parameters[6] or 0
+ parameters.extra_space=parameters.extra_space or parameters[7] or 0
+ constructors.enhanceparameters(parameters)
+ if constructors.resolvevirtualtoo then
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification)
+ local vfname=findbinfile(specification.name,'ovf')
+ if vfname and vfname~="" then
+ local vfdata=font.read_vf(vfname,size)
+ if vfdata then
+ local chars=tfmdata.characters
+ for k,v in next,vfdata.characters do
+ chars[k].commands=v.commands
+ end
+ properties.virtualized=true
+ tfmdata.fonts=vfdata.fonts
+ end
+ end
+ end
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
+ if not features.encoding then
+ local encoding,filename=match(properties.filename,"^(.-)%-(.*)$")
+ if filename and encoding and encodings.known and encodings.known[encoding] then
+ features.encoding=encoding
+ end
+ end
+ return tfmdata
+ end
+end
+local function check_tfm(specification,fullname)
+ local foundname=findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=findbinfile(fullname,'ofm') or ""
+ end
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"tfm") or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return read_from_tfm(specification)
+ elseif trace_defining then
+ report_defining("loading tfm with name %a fails",specification.name)
+ end
+end
+readers.check_tfm=check_tfm
+function readers.tfm(specification)
local fullname=specification.filename or ""
if fullname=="" then
local forced=specification.forced or ""
@@ -4947,58 +5457,1059 @@ function fonts.readers.tfm(specification)
fullname=specification.name
end
end
- local foundname=resolvers.findbinfile(fullname,'tfm') or ""
- if foundname=="" then
- foundname=resolvers.findbinfile(fullname,'ofm') or ""
- end
- if foundname~="" then
- specification.filename=foundname
- specification.format="ofm"
- return font.read_tfm(specification.filename,specification.size)
- end
+ return check_tfm(specification,fullname)
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-oti']={
+if not modules then modules={} end modules ['font-afm']={
version=1.001,
comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local lower=string.lower
-local fonts=fonts
+local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers
+local next,type,tonumber=next,type,tonumber
+local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip
+local abs=math.abs
+local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns
+local derivetable=table.derive
+local trace_features=false trackers.register("afm.features",function(v) trace_features=v end)
+local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end)
+local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end)
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local report_afm=logs.reporter("fonts","afm loading")
+local findbinfile=resolvers.findbinfile
+local definers=fonts.definers
+local readers=fonts.readers
local constructors=fonts.constructors
-local otf=constructors.newhandler("otf")
-local otffeatures=constructors.newfeatures("otf")
-local otftables=otf.tables
-local registerotffeature=otffeatures.register
-local allocate=utilities.storage.allocate
-registerotffeature {
- name="features",
- description="initialization of feature handler",
- default=true,
-}
+local afm=constructors.newhandler("afm")
+local pfb=constructors.newhandler("pfb")
+local afmfeatures=constructors.newfeatures("afm")
+local registerafmfeature=afmfeatures.register
+afm.version=1.410
+afm.cache=containers.define("fonts","afm",afm.version,true)
+afm.autoprefixed=true
+afm.helpdata={}
+afm.syncspace=true
+afm.addligatures=true
+afm.addtexligatures=true
+afm.addkerns=true
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
local function setmode(tfmdata,value)
if value then
tfmdata.properties.mode=lower(value)
end
end
-local function setlanguage(tfmdata,value)
- if value then
- local cleanvalue=lower(value)
- local languages=otftables and otftables.languages
- local properties=tfmdata.properties
- if not languages then
- properties.language=cleanvalue
- elseif languages[value] then
- properties.language=cleanvalue
- else
- properties.language="dflt"
- end
+registerafmfeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+local comment=P("Comment")
+local spacing=patterns.spacer
+local lineend=patterns.newline
+local words=C((1-lineend)^1)
+local number=C((R("09")+S("."))^1)/tonumber*spacing^0
+local data=lpeg.Carg(1)
+local pattern=(
+ comment*spacing*(
+ data*(
+ ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end
+ )+(1-lineend)^0
+ )+(1-comment)^1
+)^0
+local function scan_comment(str)
+ local fd={}
+ lpegmatch(pattern,str,1,fd)
+ return fd
+end
+local keys={}
+function keys.FontName (data,line) data.metadata.fontname=strip (line)
+ data.metadata.fullname=strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender=tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end
+function keys.Comment (data,line)
+ line=lower(line)
+ local designsize=match(line,"designsize[^%d]*(%d+)")
+ if designsize then data.metadata.designsize=tonumber(designsize) end
+end
+local function get_charmetrics(data,charmetrics,vector)
+ local characters=data.characters
+ local chr,ind={},0
+ for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
+ if k=='C' then
+ v=tonumber(v)
+ if v<0 then
+ ind=ind+1
+ else
+ ind=v
+ end
+ chr={
+ index=ind
+ }
+ elseif k=='WX' then
+ chr.width=tonumber(v)
+ elseif k=='N' then
+ characters[v]=chr
+ elseif k=='B' then
+ local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
+ chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) }
+ elseif k=='L' then
+ local plus,becomes=match(v,"^(.-) +(.-)$")
+ local ligatures=chr.ligatures
+ if ligatures then
+ ligatures[plus]=becomes
+ else
+ chr.ligatures={ [plus]=becomes }
+ end
+ end
+ end
+end
+local function get_kernpairs(data,kernpairs)
+ local characters=data.characters
+ for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
+ local chr=characters[one]
+ if chr then
+ local kerns=chr.kerns
+ if kerns then
+ kerns[two]=tonumber(value)
+ else
+ chr.kerns={ [two]=tonumber(value) }
+ end
+ end
+ end
+end
+local function get_variables(data,fontmetrics)
+ for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
+ local keyhandler=keys[key]
+ if keyhandler then
+ keyhandler(data,rest)
+ end
+ end
+end
+local function get_indexes(data,pfbname)
+ data.resources.filename=resolvers.unresolve(pfbname)
+ local pfbblob=fontloader.open(pfbname)
+ if pfbblob then
+ local characters=data.characters
+ local pfbdata=fontloader.to_table(pfbblob)
+ if pfbdata then
+ local glyphs=pfbdata.glyphs
+ if glyphs then
+ if trace_loading then
+ report_afm("getting index data from %a",pfbname)
+ end
+ for index,glyph in next,glyphs do
+ local name=glyph.name
+ if name then
+ local char=characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %a has index %a",name,index)
+ end
+ char.index=index
+ end
+ end
+ end
+ elseif trace_loading then
+ report_afm("no glyph data in pfb file %a",pfbname)
+ end
+ elseif trace_loading then
+ report_afm("no data in pfb file %a",pfbname)
+ end
+ fontloader.close(pfbblob)
+ elseif trace_loading then
+ report_afm("invalid pfb file %a",pfbname)
+ end
+end
+local function readafm(filename)
+ local ok,afmblob,size=resolvers.loadbinfile(filename)
+ if ok and afmblob then
+ local data={
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=afm.version,
+ creator="context mkiv",
+ },
+ properties={
+ hasitalics=false,
+ },
+ goodies={},
+ metadata={
+ filename=file.removesuffix(file.basename(filename))
+ },
+ characters={
+ },
+ descriptions={
+ },
+ }
+ afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics)
+ if trace_loading then
+ report_afm("loading char metrics")
+ end
+ get_charmetrics(data,charmetrics,vector)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs)
+ if trace_loading then
+ report_afm("loading kern pairs")
+ end
+ get_kernpairs(data,kernpairs)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics)
+ if trace_loading then
+ report_afm("loading variables")
+ end
+ data.afmversion=version
+ get_variables(data,fontmetrics)
+ data.fontdimens=scan_comment(fontmetrics)
+ return ""
+ end)
+ return data
+ else
+ if trace_loading then
+ report_afm("no valid afm file %a",filename)
+ end
+ return nil
+ end
+end
+local addkerns,addligatures,addtexligatures,unify,normalize
+function afm.load(filename)
+ filename=resolvers.findfile(filename,'afm') or ""
+ if filename~="" and not fonts.names.ignoredfile(filename) then
+ local name=file.removesuffix(file.basename(filename))
+ local data=containers.read(afm.cache,name)
+ local attr=lfs.attributes(filename)
+ local size,time=attr.size or 0,attr.modification or 0
+ local pfbfile=file.replacesuffix(name,"pfb")
+ local pfbname=resolvers.findfile(pfbfile,"pfb") or ""
+ if pfbname=="" then
+ pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize,pfbtime=0,0
+ if pfbname~="" then
+ local attr=lfs.attributes(pfbname)
+ pfbsize=attr.size or 0
+ pfbtime=attr.modification or 0
+ end
+ if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then
+ report_afm("reading %a",filename)
+ data=readafm(filename)
+ if data then
+ if pfbname~="" then
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %a",filename)
+ end
+ report_afm("unifying %a",filename)
+ unify(data,filename)
+ if afm.addligatures then
+ report_afm("add ligatures")
+ addligatures(data)
+ end
+ if afm.addtexligatures then
+ report_afm("add tex ligatures")
+ addtexligatures(data)
+ end
+ if afm.addkerns then
+ report_afm("add extra kerns")
+ addkerns(data)
+ end
+ normalize(data)
+ report_afm("add tounicode data")
+ fonts.mappings.addtounicode(data,filename)
+ data.size=size
+ data.time=time
+ data.pfbsize=pfbsize
+ data.pfbtime=pfbtime
+ report_afm("saving %a in cache",name)
+ data=containers.write(afm.cache,name,data)
+ data=containers.read(afm.cache,name)
+ end
+ if applyruntimefixes and data then
+ applyruntimefixes(filename,data)
+ end
+ end
+ return data
+ else
+ return nil
+ end
+end
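+ -- Illustrative call (the filename is hypothetical): loading goes through the
+ -- cache and, when needed, reads the afm/pfb pair, adds (tex) ligatures,
+ -- extra kerns and tounicode data before the result is written to the cache.
+ --   local rawdata = fonts.handlers.afm.load("somefont.afm")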
+local uparser=fonts.mappings.makenameparser()
+unify=function(data,filename)
+ local unicodevector=fonts.encodings.agl.unicodes
+ local unicodes,names={},{}
+ local private=constructors.privateoffset
+ local descriptions=data.descriptions
+ for name,blob in next,data.characters do
+ local code=unicodevector[name]
+ if not code then
+ code=lpegmatch(uparser,name)
+ if not code then
+ code=private
+ private=private+1
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
+ end
+ end
+ local index=blob.index
+ unicodes[name]=code
+ names[name]=index
+ blob.name=name
+ descriptions[code]={
+ boundingbox=blob.boundingbox,
+ width=blob.width,
+ kerns=blob.kerns,
+ index=index,
+ name=name,
+ }
+ end
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local krn={}
+ for name,kern in next,kerns do
+ local unicode=unicodes[name]
+ if unicode then
+ krn[unicode]=kern
+ else
+ end
+ end
+ description.kerns=krn
+ end
+ end
+ data.characters=nil
+ local resources=data.resources
+ local filename=resources.filename or file.removesuffix(file.basename(filename))
+ resources.filename=resolvers.unresolve(filename)
+ resources.unicodes=unicodes
+ resources.marks={}
+ resources.names=names
+ resources.private=private
+end
+normalize=function(data)
+end
+local addthem=function(rawdata,ligatures)
+ if ligatures then
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local names=resources.names
+ for ligname,ligdata in next,ligatures do
+ local one=descriptions[unicodes[ligname]]
+ if one then
+ for _,pair in next,ligdata do
+ local two,three=unicodes[pair[1]],unicodes[pair[2]]
+ if two and three then
+ local ol=one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two]=three
+ end
+ else
+ one.ligatures={ [two]=three }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
+addkerns=function(rawdata)
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local function do_it_left(what)
+ if what then
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local extrakerns
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local ks=kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex]=ks
+ else
+ extrakerns={ [complex]=ks }
+ end
+ end
+ end
+ end
+ if extrakerns then
+ description.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ local function do_it_copy(what)
+ if what then
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local complexdescription=descriptions[complex]
+ if complexdescription then
+ local simpledescription=descriptions[simple]
+ if simpledescription then
+ local extrakerns
+ local kerns=simpledescription.kerns
+ if kerns then
+ for unicode,kern in next,kerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ local extrakerns=simpledescription.extrakerns
+ if extrakerns then
+ for unicode,kern in next,extrakerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ if extrakerns then
+ complexdescription.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
+end
+local function adddimensions(data)
+ if data then
+ for unicode,description in next,data.descriptions do
+ local bb=description.boundingbox
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ description.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ description.depth=dp
+ end
+ end
+ end
+ end
+end
+local function copytotfm(data)
+ if data and data.descriptions then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local unicodes=resources.unicodes
+ for unicode,description in next,data.descriptions do
+ characters[unicode]={}
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname or metadata.fullname
+ local fullname=metadata.fullname or metadata.fontname
+ local endash=unicodes['space']
+ local emdash=unicodes['emdash']
+ local spacer="space"
+ local spaceunits=500
+ local monospaced=metadata.isfixedpitch
+ local charwidth=metadata.charwidth
+ local italicangle=metadata.italicangle
+ local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ if properties.monospaced then
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits)
+ if spaceunits<200 then
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=500
+ parameters.space_shrink=333
+ parameters.x_height=400
+ parameters.quad=1000
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif afm.syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=unicodes['x']
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ local fd=data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then
+ for k,v in next,fd do
+ parameters[k]=v
+ end
+ end
+ parameters.designsize=(metadata.designsize or 10)*65536
+ parameters.ascender=abs(metadata.ascender or 0)
+ parameters.descender=abs(metadata.descender or 0)
+ parameters.units=1000
+ properties.spacer=spacer
+ properties.encodingbytes=2
+ properties.format=fonts.formats[filename] or "type1"
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fullname
+ properties.name=filename or fullname or fontname
+ if next(characters) then
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+ end
+ return nil
+end
+function afm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
+ if okay then
+ return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
+ else
+ return {}
+ end
+end
+local function checkfeatures(specification)
+end
+local function afmtotfm(specification)
+ local afmname=specification.filename or specification.name
+ if specification.forced=="afm" or specification.format=="afm" then
+ if trace_loading then
+ report_afm("forcing afm format for %a",afmname)
+ end
+ else
+ local tfmname=findbinfile(afmname,"ofm") or ""
+ if tfmname~="" then
+ if trace_loading then
+ report_afm("fallback from afm to tfm for %a",afmname)
+ end
+ return
+ end
+ end
+ if afmname~="" then
+ local features=constructors.checkedfeatures("afm",specification.features.normal)
+ specification.features.normal=features
+ constructors.hashinstance(specification,true)
+ specification=definers.resolve(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local rawdata=afm.load(afmname)
+ if rawdata and next(rawdata) then
+ adddimensions(rawdata)
+ tfmdata=copytotfm(rawdata)
+ if tfmdata and next(tfmdata) then
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.features=features
+ shared.processes=afm.setfeatures(tfmdata,features)
+ end
+ elseif trace_loading then
+ report_afm("no (valid) afm file found with name %a",afmname)
+ end
+ tfmdata=containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+ end
+end
+local function read_from_afm(specification)
+ local tfmdata=afmtotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+ fonts.loggers.register(tfmdata,'afm',specification)
+ end
+ return tfmdata
+end
+local function prepareligatures(tfmdata,ligatures,value)
+ if value then
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,tfmdata.characters do
+ local description=descriptions[unicode]
+ local dligatures=description.ligatures
+ if dligatures then
+ local cligatures=character.ligatures
+ if not cligatures then
+ cligatures={}
+ character.ligatures=cligatures
+ end
+ for unicode,ligature in next,dligatures do
+ cligatures[unicode]={
+ char=ligature,
+ type=0
+ }
+ end
+ end
+ end
+ end
+end
+local function preparekerns(tfmdata,kerns,value)
+ if value then
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local descriptions=tfmdata.descriptions
+ for u,chr in next,tfmdata.characters do
+ local d=descriptions[u]
+ local newkerns=d[kerns]
+ if newkerns then
+ local kerns=chr.kerns
+ if not kerns then
+ kerns={}
+ chr.kerns=kerns
+ end
+ for k,v in next,newkerns do
+ local uk=unicodes[k]
+ if uk then
+ kerns[uk]=v
+ end
+ end
+ end
+ end
+ end
+end
+local list={
+ [0x0027]=0x2019,
+}
+local function texreplacements(tfmdata,value)
+ local descriptions=tfmdata.descriptions
+ local characters=tfmdata.characters
+ for k,v in next,list do
+ characters [k]=characters [v]
+ descriptions[k]=descriptions[v]
+ end
+end
+local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end
+local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
+local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end
+local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end
+registerafmfeature {
+ name="liga",
+ description="traditional ligatures",
+ initializers={
+ base=ligatures,
+ node=ligatures,
+ }
+}
+registerafmfeature {
+ name="kern",
+ description="intercharacter kerning",
+ initializers={
+ base=kerns,
+ node=kerns,
+ }
+}
+registerafmfeature {
+ name="extrakerns",
+ description="additional intercharacter kerning",
+ initializers={
+ base=extrakerns,
+ node=extrakerns,
+ }
+}
+registerafmfeature {
+ name='tlig',
+ description='tex ligatures',
+ initializers={
+ base=texligatures,
+ node=texligatures,
+ }
+}
+registerafmfeature {
+ name='trep',
+ description='tex replacements',
+ initializers={
+ base=texreplacements,
+ node=texreplacements,
+ }
+}
+local check_tfm=readers.check_tfm
+fonts.formats.afm="type1"
+fonts.formats.pfb="type1"
+local function check_afm(specification,fullname)
+ local foundname=findbinfile(fullname,'afm') or ""
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"afm") or ""
+ end
+ if foundname=="" and afm.autoprefixed then
+ local encoding,shortname=match(fullname,"^(.-)%-(.*)$")
+ if encoding and shortname and fonts.encodings.known[encoding] then
+ shortname=findbinfile(shortname,'afm') or ""
+ if shortname~="" then
+ foundname=shortname
+ if trace_defining then
+ report_afm("stripping encoding prefix from filename %a",afmname)
+ end
+ end
+ end
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="afm"
+ return read_from_afm(specification)
+ end
+end
+function readers.afm(specification,method)
+ local fullname,tfmdata=specification.filename or "",nil
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ tfmdata=check_afm(specification,specification.name.."."..forced)
+ end
+ if not tfmdata then
+ method=method or definers.method or "afm or tfm"
+ if method=="tfm" then
+ tfmdata=check_tfm(specification,specification.name)
+ elseif method=="afm" then
+ tfmdata=check_afm(specification,specification.name)
+ elseif method=="tfm or afm" then
+ tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name)
+ else
+ tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name)
+ end
+ end
+ else
+ tfmdata=check_afm(specification,fullname)
+ end
+ return tfmdata
+end
+function readers.pfb(specification,method)
+ local original=specification.specification
+ if trace_defining then
+ report_afm("using afm reader for %a",original)
+ end
+ specification.specification=gsub(original,"%.pfb",".afm")
+ specification.forced="afm"
+ return readers.afm(specification,method)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afk']={
+ version=1.001,
+ comment="companion to font-afm.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+ dataonly=true,
+}
+local allocate=utilities.storage.allocate
+fonts.handlers.afm.helpdata={
+ ligatures=allocate {
+ ['f']={
+ { 'f','ff' },
+ { 'i','fi' },
+ { 'l','fl' },
+ },
+ ['ff']={
+ { 'i','ffi' }
+ },
+ ['fi']={
+ { 'i','fii' }
+ },
+ ['fl']={
+ { 'i','fli' }
+ },
+ ['s']={
+ { 't','st' }
+ },
+ ['i']={
+ { 'j','ij' }
+ },
+ },
+ texligatures=allocate {
+ ['quoteleft']={
+ { 'quoteleft','quotedblleft' }
+ },
+ ['quoteright']={
+ { 'quoteright','quotedblright' }
+ },
+ ['hyphen']={
+ { 'hyphen','endash' }
+ },
+ ['endash']={
+ { 'hyphen','emdash' }
+ }
+ },
+ leftkerned=allocate {
+ AEligature="A",aeligature="a",
+ OEligature="O",oeligature="o",
+ IJligature="I",ijligature="i",
+ AE="A",ae="a",
+ OE="O",oe="o",
+ IJ="I",ij="i",
+ Ssharp="S",ssharp="s",
+ },
+ rightkerned=allocate {
+ AEligature="E",aeligature="e",
+ OEligature="E",oeligature="e",
+ IJligature="J",ijligature="j",
+ AE="E",ae="e",
+ OE="E",oe="e",
+ IJ="J",ij="j",
+ Ssharp="S",ssharp="s",
+ },
+ bothkerned=allocate {
+ Acircumflex="A",acircumflex="a",
+ Ccircumflex="C",ccircumflex="c",
+ Ecircumflex="E",ecircumflex="e",
+ Gcircumflex="G",gcircumflex="g",
+ Hcircumflex="H",hcircumflex="h",
+ Icircumflex="I",icircumflex="i",
+ Jcircumflex="J",jcircumflex="j",
+ Ocircumflex="O",ocircumflex="o",
+ Scircumflex="S",scircumflex="s",
+ Ucircumflex="U",ucircumflex="u",
+ Wcircumflex="W",wcircumflex="w",
+ Ycircumflex="Y",ycircumflex="y",
+ Agrave="A",agrave="a",
+ Egrave="E",egrave="e",
+ Igrave="I",igrave="i",
+ Ograve="O",ograve="o",
+ Ugrave="U",ugrave="u",
+ Ygrave="Y",ygrave="y",
+ Atilde="A",atilde="a",
+ Itilde="I",itilde="i",
+ Otilde="O",otilde="o",
+ Utilde="U",utilde="u",
+ Ntilde="N",ntilde="n",
+ Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a",
+ Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e",
+ Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i",
+ Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o",
+ Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u",
+ Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y",
+ Aacute="A",aacute="a",
+ Cacute="C",cacute="c",
+ Eacute="E",eacute="e",
+ Iacute="I",iacute="i",
+ Lacute="L",lacute="l",
+ Nacute="N",nacute="n",
+ Oacute="O",oacute="o",
+ Racute="R",racute="r",
+ Sacute="S",sacute="s",
+ Uacute="U",uacute="u",
+ Yacute="Y",yacute="y",
+ Zacute="Z",zacute="z",
+ Dstroke="D",dstroke="d",
+ Hstroke="H",hstroke="h",
+ Tstroke="T",tstroke="t",
+ Cdotaccent="C",cdotaccent="c",
+ Edotaccent="E",edotaccent="e",
+ Gdotaccent="G",gdotaccent="g",
+ Idotaccent="I",idotaccent="i",
+ Zdotaccent="Z",zdotaccent="z",
+ Amacron="A",amacron="a",
+ Emacron="E",emacron="e",
+ Imacron="I",imacron="i",
+ Omacron="O",omacron="o",
+ Umacron="U",umacron="u",
+ Ccedilla="C",ccedilla="c",
+ Kcedilla="K",kcedilla="k",
+ Lcedilla="L",lcedilla="l",
+ Ncedilla="N",ncedilla="n",
+ Rcedilla="R",rcedilla="r",
+ Scedilla="S",scedilla="s",
+ Tcedilla="T",tcedilla="t",
+ Ohungarumlaut="O",ohungarumlaut="o",
+ Uhungarumlaut="U",uhungarumlaut="u",
+ Aogonek="A",aogonek="a",
+ Eogonek="E",eogonek="e",
+ Iogonek="I",iogonek="i",
+ Uogonek="U",uogonek="u",
+ Aring="A",aring="a",
+ Uring="U",uring="u",
+ Abreve="A",abreve="a",
+ Ebreve="E",ebreve="e",
+ Gbreve="G",gbreve="g",
+ Ibreve="I",ibreve="i",
+ Obreve="O",obreve="o",
+ Ubreve="U",ubreve="u",
+ Ccaron="C",ccaron="c",
+ Dcaron="D",dcaron="d",
+ Ecaron="E",ecaron="e",
+ Lcaron="L",lcaron="l",
+ Ncaron="N",ncaron="n",
+ Rcaron="R",rcaron="r",
+ Scaron="S",scaron="s",
+ Tcaron="T",tcaron="t",
+ Zcaron="Z",zcaron="z",
+ dotlessI="I",dotlessi="i",
+ dotlessJ="J",dotlessj="j",
+ AEligature="AE",aeligature="ae",AE="AE",ae="ae",
+ OEligature="OE",oeligature="oe",OE="OE",oe="oe",
+ IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij",
+ Lstroke="L",lstroke="l",Lslash="L",lslash="l",
+ Ostroke="O",ostroke="o",Oslash="O",oslash="o",
+ Ssharp="SS",ssharp="ss",
+ Aumlaut="A",aumlaut="a",
+ Eumlaut="E",eumlaut="e",
+ Iumlaut="I",iumlaut="i",
+ Oumlaut="O",oumlaut="o",
+ Uumlaut="U",uumlaut="u",
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
+function fonts.readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
+registerotffeature {
+ name="features",
+ description="initialization of feature handler",
+ default=true,
+}
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
+ end
end
end
local function setscript(tfmdata,value)
@@ -5055,9 +6566,9 @@ local utfbyte=utf.byte
local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
local type,next,tonumber,tostring=type,next,tonumber,tostring
local abs=math.abs
-local getn=table.getn
+local insert=table.insert
local lpegmatch=lpeg.match
-local reversed,concat,remove=table.reversed,table.concat,table.remove
+local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
local ioflush=io.flush
local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
local formatters=string.formatters
@@ -5079,7 +6590,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.743
+otf.version=2.755
otf.cache=containers.define("fonts","otf",otf.version,true)
local fontdata=fonts.hashes.identifiers
local chardata=characters and characters.data
@@ -5099,17 +6610,47 @@ local packdata=true
local syncspace=true
local forcenotdef=false
local includesubfonts=false
+local overloadkerns=false
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
local wildcard="*"
local default="dflt"
local fontloaderfields=fontloader.fields
local mainfields=nil
local glyphfields=nil
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end)
+function otf.fileformat(filename)
+ local leader=lower(io.loadchunk(filename,4))
+ local suffix=lower(file.suffix(filename))
+ if leader=="otto" then
+ return formats.otf,suffix=="otf"
+ elseif leader=="ttcf" then
+ return formats.ttc,suffix=="ttc"
+ elseif suffix=="ttc" then
+ return formats.ttc,true
+ elseif suffix=="dfont" then
+ return formats.dfont,true
+ else
+ return formats.ttf,suffix=="ttf"
+ end
+end
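-- Editor's note: otf.fileformat above sniffs the first four bytes of the file
-- ("OTTO" marks a CFF flavoured OpenType font, "ttcf" a TrueType collection)
-- and only falls back on the suffix when the magic bytes are inconclusive; the
-- second return value says whether the suffix agrees with the sniffed format.
-- A stand-alone sketch of the same idea using plain io instead of ConTeXt's
-- io.loadchunk; not part of this patch, and "somefont.ttc" is a made-up name:
local function sniffformat(filename)
  local f = io.open(filename, "rb")
  if not f then
    return nil                                    -- file not found
  end
  local leader = string.lower(f:read(4) or "")
  f:close()
  local suffix = string.lower(string.match(filename, "%.([^%.]+)$") or "")
  if leader == "otto" then
    return "opentype", suffix == "otf"
  elseif leader == "ttcf" or suffix == "ttc" then
    return "truetype", suffix == "ttc"
  elseif suffix == "dfont" then
    return "truetype", true
  else
    return "truetype", suffix == "ttf"
  end
end

print(sniffformat("somefont.ttc"))                -- nil here (no such file)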
+local function otf_format(filename)
+ local format,okay=otf.fileformat(filename)
+ if not okay then
+ report_otf("font %a is actually an %a file",filename,format)
+ end
+ return format
+end
local function load_featurefile(raw,featurefile)
if featurefile and featurefile~="" then
if trace_loading then
@@ -5201,6 +6742,7 @@ local valid_fields=table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
+ "validation_state",
"version",
"vert_base",
"weight",
@@ -5296,7 +6838,7 @@ end
function enhancers.register(what,action)
actions[what]=action
end
-function otf.load(filename,format,sub,featurefile)
+function otf.load(filename,sub,featurefile)
local base=file.basename(file.removesuffix(filename))
local name=file.removesuffix(base)
local attr=lfs.attributes(filename)
@@ -5394,7 +6936,7 @@ function otf.load(filename,format,sub,featurefile)
data={
size=size,
time=time,
- format=format,
+ format=otf_format(filename),
featuredata=featurefiles,
resources={
filename=resolvers.unresolve(filename),
@@ -5415,7 +6957,7 @@ function otf.load(filename,format,sub,featurefile)
},
descriptions={},
goodies={},
- helpers={
+ helpers={
tounicodelist=splitter,
tounicodetable=lpeg.Ct(splitter),
},
@@ -5460,6 +7002,9 @@ function otf.load(filename,format,sub,featurefile)
report_otf("loading from cache using hash %a",hash)
end
enhance("unpack",data,filename,nil,false)
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
enhance("add dimensions",data,filename,nil,false)
if trace_sequences then
showfeatureorder(data,filename)
@@ -5588,15 +7133,22 @@ actions["prepare glyphs"]=function(data,filename,raw)
local glyph=cidglyphs[index]
if glyph then
local unicode=glyph.unicode
+if unicode>=0x00E000 and unicode<=0x00F8FF then
+ unicode=-1
+elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
+ unicode=-1
+elseif unicode>=0x100000 and unicode<=0x10FFFD then
+ unicode=-1
+end
local name=glyph.name or cidnames[index]
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=cidunicodes[index]
end
if unicode and descriptions[unicode] then
report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
unicode=-1
end
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
if not name then
name=format("u%06X",private)
end
@@ -5642,7 +7194,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
if glyph then
local unicode=glyph.unicode
local name=glyph.name
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=private
unicodes[name]=private
if trace_private then
@@ -5664,7 +7216,6 @@ actions["prepare glyphs"]=function(data,filename,raw)
}
local altuni=glyph.altuni
if altuni then
- local d
for i=1,#altuni do
local a=altuni[i]
local u=a.unicode
@@ -5677,15 +7228,8 @@ actions["prepare glyphs"]=function(data,filename,raw)
vv={ [u]=unicode }
variants[v]=vv
end
- elseif d then
- d[#d+1]=u
- else
- d={ u }
end
end
- if d then
- duplicates[unicode]=d
- end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -5703,47 +7247,45 @@ actions["check encoding"]=function(data,filename,raw)
local duplicates=resources.duplicates
local mapdata=raw.map or {}
local unicodetoindex=mapdata and mapdata.map or {}
+ local indextounicode=mapdata and mapdata.backmap or {}
local encname=lower(data.enc_name or mapdata.enc_name or "")
- local criterium=0xFFFF
+ local criterium=0xFFFF
+ local privateoffset=constructors.privateoffset
if find(encname,"unicode") then
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- for unicode,index in next,unicodetoindex do
- if unicode<=criterium and not descriptions[unicode] then
- local parent=indices[index]
- if not parent then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ local reported={}
+ for maybeunicode,index in next,unicodetoindex do
+ if descriptions[maybeunicode] then
+ else
+ local unicode=indices[index]
+ if not unicode then
+ elseif maybeunicode==unicode then
+ elseif unicode>privateoffset then
else
- local parentdescription=descriptions[parent]
- if parentdescription then
- local altuni=parentdescription.altuni
- if not altuni then
- altuni={ { unicode=parent } }
- parentdescription.altuni=altuni
- duplicates[parent]={ unicode }
+ local d=descriptions[unicode]
+ if d then
+ local c=d.copies
+ if c then
+ c[maybeunicode]=true
else
- local done=false
- for i=1,#altuni do
- if altuni[i].unicode==parent then
- done=true
- break
- end
- end
- if not done then
- altuni[#altuni+1]={ unicode=parent }
- table.insert(duplicates[parent],unicode)
- end
+ d.copies={ [maybeunicode]=true }
end
- if trace_loading then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- end
- else
- report_otf("weird, unicode %U points to %U with index %H",unicode,index)
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index]=true
end
end
end
end
+ for unicode,data in next,descriptions do
+ local d=data.copies
+ if d then
+ duplicates[unicode]=sortedkeys(d)
+ data.copies=nil
+ end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -5751,6 +7293,7 @@ actions["check encoding"]=function(data,filename,raw)
end
if mapdata then
mapdata.map={}
+ mapdata.backmap={}
end
end
actions["add duplicates"]=function(data,filename,raw)
@@ -5761,28 +7304,37 @@ actions["add duplicates"]=function(data,filename,raw)
local indices=resources.indices
local duplicates=resources.duplicates
for unicode,d in next,duplicates do
- for i=1,#d do
- local u=d[i]
- if not descriptions[u] then
- local description=descriptions[unicode]
- local duplicate=table.copy(description)
- duplicate.comment=format("copy of U+%05X",unicode)
- descriptions[u]=duplicate
- local n=0
- for _,description in next,descriptions do
- if kerns then
- local kerns=description.kerns
- for _,k in next,kerns do
- local ku=k[unicode]
- if ku then
- k[u]=ku
- n=n+1
+ local nofduplicates=#d
+ if nofduplicates>4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ for i=1,nofduplicates do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local n=0
+ for _,description in next,descriptions do
+ if kerns then
+ local kerns=description.kerns
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
end
end
end
- end
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ if u>0 then
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
end
end
end
@@ -5935,14 +7487,6 @@ local g_directions={
gsub_reversecontextchain=-1,
gpos_reversecontextchain=-1,
}
-local function supported(features)
- for i=1,#features do
- if features[i].ismac then
- return false
- end
- end
- return true
-end
actions["reorganize subtables"]=function(data,filename,raw)
local resources=data.resources
local sequences={}
@@ -5956,7 +7500,6 @@ actions["reorganize subtables"]=function(data,filename,raw)
for k=1,#dw do
local gk=dw[k]
local features=gk.features
- if not features or supported(features) then
local typ=gk.type
local chain=g_directions[typ] or 0
local subtables=gk.subtables
@@ -5986,10 +7529,16 @@ actions["reorganize subtables"]=function(data,filename,raw)
report_otf("skipping weird lookup number %s",k)
elseif features then
local f={}
+ local o={}
for i=1,#features do
local df=features[i]
local tag=strip(lower(df.tag))
- local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ o[#o+1]=tag
+ end
local dscripts=df.scripts
for i=1,#dscripts do
local d=dscripts[i]
@@ -6009,6 +7558,7 @@ actions["reorganize subtables"]=function(data,filename,raw)
subtables=subtables,
markclass=markclass,
features=f,
+ order=o,
}
else
lookups[name]={
@@ -6019,7 +7569,6 @@ actions["reorganize subtables"]=function(data,filename,raw)
markclass=markclass,
}
end
- end
end
end
end
@@ -6390,74 +7939,93 @@ actions["merge kern classes"]=function(data,filename,raw)
local resources=data.resources
local unicodes=resources.unicodes
local splitter=data.helpers.tounicodetable
+ local ignored=0
+ local blocked=0
for gp=1,#gposlist do
local gpos=gposlist[gp]
local subtables=gpos.subtables
if subtables then
+ local first_done={}
+ local split={}
for s=1,#subtables do
local subtable=subtables[s]
local kernclass=subtable.kernclass
+ local lookup=subtable.lookup or subtable.name
if kernclass then
- local split={}
- for k=1,#kernclass do
- local kcl=kernclass[k]
- local firsts=kcl.firsts
- local seconds=kcl.seconds
- local offsets=kcl.offsets
- local lookups=kcl.lookup
- if type(lookups)~="table" then
- lookups={ lookups }
- end
- for n,s in next,firsts do
- split[s]=split[s] or lpegmatch(splitter,s)
- end
- local maxseconds=0
- for n,s in next,seconds do
- if n>maxseconds then
- maxseconds=n
- end
- split[s]=split[s] or lpegmatch(splitter,s)
- end
- for l=1,#lookups do
- local lookup=lookups[l]
- for fk=1,#firsts do
- local fv=firsts[fk]
- local splt=split[fv]
- if splt then
- local extrakerns={}
- local baseoffset=(fk-1)*maxseconds
- for sk=2,maxseconds do
- local sv=seconds[sk]
- local splt=split[sv]
- if splt then
- local offset=offsets[baseoffset+sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]]=offset
- end
- end
+ if #kernclass>0 then
+ kernclass=kernclass[1]
+ lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts=kernclass.firsts
+ local seconds=kernclass.seconds
+ local offsets=kernclass.offsets
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
end
end
- for i=1,#splt do
- local first_unicode=splt[i]
- local description=descriptions[first_unicode]
- if description then
- local kerns=description.kerns
- if not kerns then
- kerns={}
- description.kerns=kerns
- end
- local lookupkerns=kerns[lookup]
- if not lookupkerns then
- lookupkerns={}
- kerns[lookup]=lookupkerns
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked=blocked+1
+ else
+ first_done[first_unicode]=true
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ if overloadkerns then
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
end
+ else
for second_unicode,kern in next,extrakerns do
- lookupkerns[second_unicode]=kern
+ local k=lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode]=kern
+ elseif k~=kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored=ignored+1
+ end
end
- elseif trace_loading then
- report_otf("no glyph data for %U",first_unicode)
end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
end
end
end
@@ -6468,6 +8036,12 @@ actions["merge kern classes"]=function(data,filename,raw)
end
end
end
+ if ignored>0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked>0 then
+ report_otf("%s successive kerns blocked",blocked)
+ end
end
end
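-- Editor's note: the reworked "merge kern classes" action above keeps the first
-- kern it sees for a (first,second) pair and only overwrites it when the
-- fonts.otf.loader.overloadkerns directive is enabled; conflicting later values
-- are counted and reported instead. A stand-alone sketch of that merge policy,
-- not part of this patch, with made-up kern tables:
local function mergekerns(target, extra, overload)
  local ignored = 0
  for second, kern in pairs(extra) do
    local known = target[second]
    if overload or known == nil then
      target[second] = kern                -- first value wins unless overloading
    elseif known ~= kern then
      ignored = ignored + 1                -- conflicting later value is dropped
    end
  end
  return ignored
end

local lookupkerns = { [0x41] = -30 }                 -- existing kern with "A"
local extrakerns  = { [0x41] = -10, [0x56] = -80 }   -- class kerns to merge in
local ignored     = mergekerns(lookupkerns, extrakerns, false)
print(lookupkerns[0x41], lookupkerns[0x56], ignored) -- -30  -80  1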
actions["check glyphs"]=function(data,filename,raw)
@@ -6491,6 +8065,11 @@ actions["check metadata"]=function(data,filename,raw)
ttftables[i].data="deleted"
end
end
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ local name=file.nameonly(filename)
+ metadata.fontname="bad-fontname-"..name
+ metadata.fullname="bad-fullname-"..name
+ end
end
actions["cleanup tables"]=function(data,filename,raw)
data.resources.indices=nil
@@ -6681,10 +8260,19 @@ local function copytotfm(data,cache_id)
end
end
end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ report_otf("changing %a units to %a",0,units)
+ end
local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
local charwidth=pfminfo.avgwidth
- local italicangle=metadata.italicangle
local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ local italicangle=metadata.italicangle
properties.monospaced=monospaced
parameters.italicangle=italicangle
parameters.charwidth=charwidth
@@ -6713,14 +8301,6 @@ local function copytotfm(data,cache_id)
end
end
spaceunits=tonumber(spaceunits) or 500
- local filename=constructors.checkedfilename(resources)
- local fontname=metadata.fontname
- local fullname=metadata.fullname or fontname
- local units=metadata.units_per_em or 1000
- if units==0 then
- units=1000
- metadata.units_per_em=1000
- end
parameters.slant=0
parameters.space=spaceunits
parameters.space_stretch=units/2
@@ -6729,10 +8309,10 @@ local function copytotfm(data,cache_id)
parameters.quad=units
if spaceunits<2*units/5 then
end
- if italicangle then
+ if italicangle and italicangle~=0 then
parameters.italicangle=italicangle
parameters.italicfactor=math.cos(math.rad(90+italicangle))
- parameters.slant=- math.round(math.tan(italicangle*math.pi/180))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
end
if monospaced then
parameters.space_stretch=0
@@ -6759,7 +8339,7 @@ local function copytotfm(data,cache_id)
parameters.units=units
properties.space=spacer
properties.encodingbytes=2
- properties.format=data.format or fonts.formats[filename] or "opentype"
+ properties.format=data.format or otf_format(filename) or formats.otf
properties.noglyphnames=true
properties.filename=filename
properties.fontname=fontname
@@ -6784,10 +8364,27 @@ local function otftotfm(specification)
local name=specification.name
local sub=specification.sub
local filename=specification.filename
- local format=specification.format
local features=specification.features.normal
- local rawdata=otf.load(filename,format,sub,features and features.featurefile)
+ local rawdata=otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
+ local descriptions=rawdata.descriptions
+ local duplicates=rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates,nofduplicated=0,0
+ for parent,list in next,duplicates do
+ for i=1,#list do
+ local unicode=list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ end
+ nofduplicates=nofduplicates+#list
+ end
+ if trace_otf and nofduplicated~=nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
rawdata.lookuphash={}
tfmdata=copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
@@ -6868,41 +8465,33 @@ function otf.collectlookups(rawdata,kind,script,language)
end
return nil,nil
end
-local function check_otf(forced,specification,suffix,what)
+local function check_otf(forced,specification,suffix)
local name=specification.name
if forced then
- name=file.addsuffix(name,suffix,true)
+ name=specification.forcedname
end
local fullname=findbinfile(name,suffix) or ""
if fullname=="" then
fullname=fonts.names.getfilename(name,suffix) or ""
end
- if fullname~="" then
+ if fullname~="" and not fonts.names.ignoredfile(fullname) then
specification.filename=fullname
- specification.format=what
return read_from_otf(specification)
end
end
-local function opentypereader(specification,suffix,what)
+local function opentypereader(specification,suffix)
local forced=specification.forced or ""
- if forced=="otf" then
- return check_otf(true,specification,forced,"opentype")
- elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then
- return check_otf(true,specification,forced,"truetype")
+ if formats[forced] then
+ return check_otf(true,specification,forced)
else
- return check_otf(false,specification,suffix,what)
+ return check_otf(false,specification,suffix)
end
end
-readers.opentype=opentypereader
-local formats=fonts.formats
-formats.otf="opentype"
-formats.ttf="truetype"
-formats.ttc="truetype"
-formats.dfont="truetype"
-function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
-function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
-function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
-function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
+readers.opentype=opentypereader
+function readers.otf (specification) return opentypereader(specification,"otf") end
+function readers.ttf (specification) return opentypereader(specification,"ttf") end
+function readers.ttc (specification) return opentypereader(specification,"ttf") end
+function readers.dfont(specification) return opentypereader(specification,"ttf") end
function otf.scriptandlanguage(tfmdata,attr)
local properties=tfmdata.properties
return properties.script or "dflt",properties.language or "dflt"
@@ -7415,8 +9004,9 @@ basemethods.shared={
basemethod="independent"
local function featuresinitializer(tfmdata,value)
if true then
- local t=trace_preparing and os.clock()
+ local starttime=trace_preparing and os.clock()
local features=tfmdata.shared.features
+ local fullname=trace_preparing and tfmdata.properties.fullname
if features then
applybasemethod("initializehashes",tfmdata)
local collectlookups=otf.collectlookups
@@ -7426,26 +9016,34 @@ local function featuresinitializer(tfmdata,value)
local language=properties.language
local basesubstitutions=rawdata.resources.features.gsub
local basepositionings=rawdata.resources.features.gpos
- if basesubstitutions then
- for feature,data in next,basesubstitutions do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositionings then
- for feature,data in next,basepositionings do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
+ if basesubstitutions or basepositionings then
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local sfeatures=sequence.features
+ if sfeatures then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local feature=order[i]
+ if features[feature] then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base feature %a for %a",feature,fullname)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base feature %a for %a",feature,fullname)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
end
end
end
@@ -7453,7 +9051,7 @@ local function featuresinitializer(tfmdata,value)
registerbasehash(tfmdata)
end
if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
end
end
end
@@ -7549,9 +9147,9 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
return 0,0
end
end
-function injections.setmark(start,base,factor,rlmode,ba,ma,index)
- local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
- local bound=base[a_markbase]
+function injections.setmark(start,base,factor,rlmode,ba,ma)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ local bound=base[a_markbase]
local index=1
if bound then
local mb=marks[bound]
@@ -7803,6 +9401,11 @@ function injections.handler(head,where,keep)
else
n.xoffset=p.xoffset-d[1]
end
+ local w=n.width
+ if w~=0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
+ end
end
if mk[p] then
n.yoffset=p.yoffset+d[2]
@@ -7944,6 +9547,7 @@ analyzers.useunicodemarks=false
local a_state=attributes.private('state')
local nodecodes=nodes.nodecodes
local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
local math_code=nodecodes.math
local traverse_id=node.traverse_id
local traverse_node_list=node.traverse
@@ -7976,6 +9580,11 @@ local features={
medi=s_medi,
fina=s_fina,
isol=s_isol,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
}
analyzers.states=states
analyzers.features=features
@@ -8010,7 +9619,7 @@ function analyzers.setstate(head,font)
first,last,n=nil,nil,0
end
elseif id==disc_code then
- current[a_state]=s_midi
+ current[a_state]=s_medi
last=current
else
if first and first==last then
@@ -8062,7 +9671,7 @@ local function analyzeprocessor(head,font,attr)
end
registerotffeature {
name="analyze",
- description="analysis of (for instance) character classes",
+ description="analysis of character classes",
default=true,
initializers={
node=analyzeinitializer,
@@ -8339,6 +9948,7 @@ local default="dflt"
local nodecodes=nodes.nodecodes
local whatcodes=nodes.whatcodes
local glyphcodes=nodes.glyphcodes
+local disccodes=nodes.disccodes
local glyph_code=nodecodes.glyph
local glue_code=nodecodes.glue
local disc_code=nodecodes.disc
@@ -8346,6 +9956,7 @@ local whatsit_code=nodecodes.whatsit
local math_code=nodecodes.math
local dir_code=whatcodes.dir
local localpar_code=whatcodes.localpar
+local discretionary_code=disccodes.discretionary
local ligature_code=glyphcodes.ligature
local privateattribute=attributes.private
local a_state=privateattribute('state')
@@ -8593,13 +10204,13 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
end
end
end
-local function multiple_glyphs(head,start,multiple)
+local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples=#multiple
if nofmultiples>0 then
start.char=multiple[1]
if nofmultiples>1 then
local sn=start.next
- for k=2,nofmultiples do
+ for k=2,nofmultiples do
local n=copy_node(start)
n.char=multiple[k]
n.next=sn
@@ -8634,11 +10245,11 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
end
return head,start,true
end
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
end
- return multiple_glyphs(head,start,multiple)
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local s,stop,discfound=start.next,nil,false
@@ -8702,9 +10313,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
break
end
end
- if stop then
- local lig=ligature.ligature
- if lig then
+ local lig=ligature.ligature
+ if lig then
+ if stop then
if trace_ligatures then
local stopchar=stop.char
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
@@ -8714,7 +10325,13 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
end
return head,start,true
else
+ start.char=lig
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ return head,start,true
end
+ else
end
end
return head,start,false
@@ -8871,7 +10488,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -8972,7 +10589,6 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
prev=snext
snext=snext.next
else
- local krn=kerns[nextchar]
if not krn then
elseif type(krn)=="table" then
if lookuptype=="pair" then
@@ -9045,34 +10661,6 @@ function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,loo
return head,start,false
end
end
-local function delete_till_stop(start,stop,ignoremarks)
- local n=1
- if start==stop then
- elseif ignoremarks then
- repeat
- local next=start.next
- if not marks[next.char] then
- local components=next.components
- if components then
- flush_node_list(components)
- end
- delete_node(start,next)
- end
- n=n+1
- until next==stop
- else
- repeat
- local next=start.next
- local components=next.components
- if components then
- flush_node_list(components)
- end
- delete_node(start,next)
- n=n+1
- until next==stop
- end
- return n
-end
function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
local current=start
local subtables=currentlookup.subtables
@@ -9112,7 +10700,6 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
end
chainmores.gsub_single=chainprocs.gsub_single
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- delete_till_stop(start,stop)
local startchar=start.char
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -9131,7 +10718,7 @@ function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,
if trace_multiples then
logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
end
- return multiple_glyphs(head,start,replacements)
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
end
end
return head,start,false
@@ -9412,7 +10999,7 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -9515,6 +11102,7 @@ function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lo
end
return head,start,false
end
+chainmores.gpos_single=chainprocs.gpos_single
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
local snext=start.next
if snext then
@@ -9583,6 +11171,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
end
return head,start,false
end
+chainmores.gpos_pair=chainprocs.gpos_pair
local function show_skip(kind,chainname,char,ck,class)
if ck[9] then
logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
@@ -9799,7 +11388,11 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if chainlookup then
local cp=chainprocs[chainlookup.type]
if cp then
- head,start,done=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ local ok
+ head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done=true
+ end
else
logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
end
@@ -9826,19 +11419,24 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
local chainlookupname=chainlookups[i]
- local chainlookup=lookuptable[chainlookupname]
- local cp=chainlookup and chainmores[chainlookup.type]
- if cp then
- local ok,n
- head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- if ok then
- done=true
- i=i+(n or 1)
- else
+ local chainlookup=lookuptable[chainlookupname]
+ if not chainlookup then
+ i=i+1
+ else
+ local cp=chainmores[chainlookup.type]
+ if not cp then
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
i=i+1
+ else
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
end
- else
- i=i+1
end
if start then
start=start.next
@@ -9920,14 +11518,20 @@ local autofeatures=fonts.analyzers.features
local function initialize(sequence,script,language,enabled)
local features=sequence.features
if features then
- for kind,scripts in next,features do
- local valid=enabled[kind]
- if valid then
- local languages=scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local kind=order[i]
+ local valid=enabled[kind]
+ if valid then
+ local scripts=features[kind]
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
end
end
+ else
end
end
return false
@@ -9954,12 +11558,12 @@ function otf.dataset(tfmdata,font)
}
rs[language]=rl
local sequences=tfmdata.resources.sequences
-for s=1,#sequences do
- local v=enabled and initialize(sequences[s],script,language,enabled)
- if v then
- rl[#rl+1]=v
- end
-end
+ for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+ end
end
return rl
end
@@ -9985,227 +11589,404 @@ local function featuresprocessor(head,font,attr)
local done=false
local datasets=otf.dataset(tfmdata,font,attr)
local dirstack={}
-for s=1,#datasets do
- local dataset=datasets[s]
- featurevalue=dataset[1]
- local sequence=dataset[5]
- local rlparmode=0
- local topstack=0
- local success=false
- local attribute=dataset[2]
- local chain=dataset[3]
- local typ=sequence.type
- local subtables=sequence.subtables
- if chain<0 then
- local handler=handlers[typ]
- local start=find_node_tail(head)
+ for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local sequence=dataset[5]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=a==attr
+ else
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.prev end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ end
+ else
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=start.id
+ if id==glyph_code and start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=disc.prev
+ local next=disc.next
+ if prev and next then
+ prev.next=next
+ local a=prev[0]
+ if a then
+ a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ else
+ a=not attribute or prev[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[prev.char]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ success=true
+ end
+ end
+ end
+ prev.next=disc
+ end
+ return next
+ end
while start do
local id=start.id
if id==glyph_code then
if start.font==font and start.subtype<256 then
local a=start[0]
if a then
- a=a==attr
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
else
- a=true
+ a=not attribute or start[a_state]==attribute
end
if a then
- for i=1,#subtables do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[start.char]
- if lookupmatch then
- head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
end
end
- if start then start=start.prev end
+ if start then start=start.next end
else
- start=start.prev
+ start=start.next
end
else
- start=start.prev
+ start=start.next
+ end
+ elseif id==disc_code then
+ if start.subtype==discretionary_code then
+ local pre=start.pre
+ if pre then
+ local new=subrun(pre)
+ if new then start.pre=new end
+ end
+ local post=start.post
+ if post then
+ local new=subrun(post)
+ if new then start.post=new end
+ end
+ local replace=start.replace
+ if replace then
+ local new=subrun(replace)
+ if new then start.replace=new end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
+ end
+ start=start.next
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
else
- start=start.prev
+ start=start.next
end
end
- else
- local handler=handlers[typ]
- local ns=#subtables
- local start=head
- rlmode=0
- if ns==1 then
- local lookupname=subtables[1]
- local lookupcache=lookuphash[lookupname]
- if not lookupcache then
- report_missing_cache(typ,lookupname)
- else
- while start do
- local id=start.id
- if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
- if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
- else
- a=not attribute or start[a_state]==attribute
- end
- if a then
- local lookupmatch=lookupcache[start.char]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success=true
- end
+ end
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=start.id
+ if id==glyph_code and start.id==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ elseif not start then
+ break
end
- if start then start=start.next end
- else
- start=start.next
end
- elseif id==math_code then
- start=end_of_math(start).next
else
- start=start.next
+ report_missing_cache(typ,lookupname)
end
- elseif id==whatsit_code then
- local subtype=start.subtype
- if subtype==dir_code then
- local dir=start.dir
- if dir=="+TRT" or dir=="+TLT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- elseif dir=="-TRT" or dir=="-TLT" then
- topstack=topstack-1
- end
- local newdir=dirstack[topstack]
- if newdir=="+TRT" then
- rlmode=-1
- elseif newdir=="+TLT" then
- rlmode=1
- else
- rlmode=rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype==localpar_code then
- local dir=start.dir
- if dir=="TRT" then
- rlparmode=-1
- elseif dir=="TLT" then
- rlparmode=1
- else
- rlparmode=0
- end
- rlmode=rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=disc.prev
+ local next=disc.next
+ if prev and next then
+ prev.next=next
+ local a=prev[0]
+ if a then
+ a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ else
+ a=not attribute or prev[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[prev.char]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
end
end
- start=start.next
- elseif id==math_code then
- start=end_of_math(start).next
else
- start=start.next
+ report_missing_cache(typ,lookupname)
end
end
end
- else
- while start do
- local id=start.id
- if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
- if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
- else
- a=not attribute or start[a_state]==attribute
- end
- if a then
- for i=1,ns do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[start.char]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success=true
- break
- elseif not start then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
+ prev.next=disc
+ end
+ return next
+ end
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ elseif not start then
+ break
end
end
- if start then start=start.next end
- else
- start=start.next
- end
- else
- start=start.next
- end
- elseif id==whatsit_code then
- local subtype=start.subtype
- if subtype==dir_code then
- local dir=start.dir
- if dir=="+TRT" or dir=="+TLT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- elseif dir=="-TRT" or dir=="-TLT" then
- topstack=topstack-1
- end
- local newdir=dirstack[topstack]
- if newdir=="+TRT" then
- rlmode=-1
- elseif newdir=="+TLT" then
- rlmode=1
- else
- rlmode=rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype==localpar_code then
- local dir=start.dir
- if dir=="TRT" then
- rlparmode=-1
- elseif dir=="TLT" then
- rlparmode=1
else
- rlparmode=0
- end
- rlmode=rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ report_missing_cache(typ,lookupname)
end
end
- start=start.next
- elseif id==math_code then
- start=end_of_math(start).next
+ if start then start=start.next end
else
start=start.next
end
+ else
+ start=start.next
+ end
+ elseif id==disc_code then
+ if start.subtype==discretionary_code then
+ local pre=start.pre
+ if pre then
+ local new=subrun(pre)
+ if new then start.pre=new end
+ end
+ local post=start.post
+ if post then
+ local new=subrun(post)
+ if new then start.post=new end
+ end
+ local replace=start.replace
+ if replace then
+ local new=subrun(replace)
+ if new then start.replace=new end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
+ end
+ start=start.next
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
end
end
- if success then
- done=true
- end
- if trace_steps then
- registerstep(head)
- end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
end
return head,done
end
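-- Editor's note: the featuresprocessor above tracks writing direction with a
-- small stack: "+TRT"/"+TLT" whatsits push, "-TRT"/"-TLT" pop, and rlmode
-- (-1 for right-to-left, 1 for left-to-right) is taken from the top of the
-- stack, falling back to the paragraph mode when the stack runs empty. A
-- stand-alone sketch of that bookkeeping, not part of this patch, fed with a
-- made-up stream of direction codes:
local function trackdirections(dirs, rlparmode)
  local dirstack, topstack, rlmode, trace = { }, 0, rlparmode, { }
  for i = 1, #dirs do
    local dir = dirs[i]
    if dir == "+TRT" or dir == "+TLT" then
      topstack = topstack + 1
      dirstack[topstack] = dir
    elseif dir == "-TRT" or dir == "-TLT" then
      topstack = topstack - 1
    end
    local newdir = dirstack[topstack]
    if newdir == "+TRT" then
      rlmode = -1
    elseif newdir == "+TLT" then
      rlmode = 1
    else
      rlmode = rlparmode                  -- back to the paragraph direction
    end
    trace[i] = rlmode
  end
  return trace
end

-- an LTR paragraph with a nested RTL run:
print(table.concat(trackdirections({ "+TLT", "+TRT", "-TRT", "-TLT" }, 1), " "))
-- 1 -1 1 1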
@@ -10809,6 +12590,14 @@ local function packdata(data)
features[script]=pack_normal(feature)
end
end
+ local order=sequence.order
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ local markclass=sequence.markclass
+ if markclass then
+ sequence.markclass=pack_boolean(markclass)
+ end
end
end
local lookups=resources.lookups
@@ -11221,6 +13010,20 @@ local function unpackdata(data)
end
end
end
+ local order=feature.order
+ if order then
+ local tv=tables[order]
+ if tv then
+ feature.order=tv
+ end
+ end
+ local markclass=feature.markclass
+ if markclass then
+ local tv=tables[markclass]
+ if tv then
+ feature.markclass=tv
+ end
+ end
end
end
local lookups=resources.lookups
@@ -11315,6 +13118,7 @@ if not modules then modules={} end modules ['font-def']={
local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
local tostring,next=tostring,next
local lpegmatch=lpeg.match
+local suffixonly,removesuffix=file.suffix,file.removesuffix
local allocate=utilities.storage.allocate
local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end)
local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
@@ -11362,7 +13166,7 @@ addlookup("file")
addlookup("name")
addlookup("spec")
local function getspecification(str)
- return lpegmatch(splitter,str)
+ return lpegmatch(splitter,str or "")
end
definers.getspecification=getspecification
function definers.registersplit(symbol,action,verbosename)
@@ -11404,10 +13208,11 @@ definers.resolvers=definers.resolvers or {}
local resolvers=definers.resolvers
function resolvers.file(specification)
local name=resolvefile(specification.name)
- local suffix=file.suffix(name)
+ local suffix=lower(suffixonly(name))
if fonts.formats[suffix] then
specification.forced=suffix
- specification.name=file.removesuffix(name)
+ specification.forcedname=name
+ specification.name=removesuffix(name)
else
specification.name=name
end
@@ -11419,10 +13224,11 @@ function resolvers.name(specification)
if resolved then
specification.resolved=resolved
specification.sub=sub
- local suffix=file.suffix(resolved)
+ local suffix=lower(suffixonly(resolved))
if fonts.formats[suffix] then
specification.forced=suffix
- specification.name=file.removesuffix(resolved)
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
else
specification.name=resolved
end
@@ -11438,8 +13244,9 @@ function resolvers.spec(specification)
if resolved then
specification.resolved=resolved
specification.sub=sub
- specification.forced=file.suffix(resolved)
- specification.name=file.removesuffix(resolved)
+ specification.forced=lower(suffixonly(resolved))
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
end
else
resolvers.name(specification)
@@ -11454,8 +13261,7 @@ function definers.resolve(specification)
end
if specification.forced=="" then
specification.forced=nil
- else
- specification.forced=specification.forced
+ specification.forcedname=nil
end
specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
if specification.sub and specification.sub~="" then
@@ -11500,7 +13306,7 @@ function definers.loadfont(specification)
if not tfmdata then
local forced=specification.forced or ""
if forced~="" then
- local reader=readers[lower(forced)]
+ local reader=readers[lower(forced)]
tfmdata=reader and reader(specification)
if not tfmdata then
report_defining("forced type %a of %a not found",forced,specification.name)
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-otn.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-otn.lua
new file mode 100644
index 00000000000..068f0a9b926
--- /dev/null
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-otn.lua
@@ -0,0 +1,2848 @@
+if not modules then modules = { } end modules ['font-otn'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- preprocessors = { "nodes" }
+
+-- this is still somewhat preliminary and it will get better in due time;
+-- much functionality could only be implemented thanks to the husayni font
+-- of Idris Samawi Hamid to whom we dedicate this module.
+
+-- in retrospect it always looks easy but believe it or not, it took a lot
+-- of work to get proper OpenType support done: buggy fonts, fuzzy specs,
+-- specially made test fonts, many skype sessions between taco, idris and me,
+-- torture tests etc etc ... unfortunately the code does not show how much
+-- time it took ...
+
+-- todo:
+--
+-- kerning is probably not yet ok for latin around disc nodes (interesting challenge)
+-- extension infrastructure (for usage out of context)
+-- sorting features according to vendors/renderers
+-- alternative loop quitters
+-- check cursive and r2l
+-- find out where ignore-mark-classes went
+-- default features (per language, script)
+-- handle positions (we need example fonts)
+-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
+-- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
+-- remove some optimizations (when I have a faster machine)
+--
+-- maybe redo the lot some way (more context specific)
+
+--[[ldx--
+This module is a bit more split up that I'd like but since we also want to test
+with plain TeX it has to be so. This module is part of ConTeXt
+and discussion about improvements and functionality mostly happens on the
+ConTeXt mailing list.
+
+The specification of OpenType is kind of vague. Apart from a lack of a proper
+free specification there's also the problem that Microsoft and Adobe
+may have their own interpretation of how and in what order to apply features.
+In general the Microsoft website has more detailed specifications and is a
+better reference. There is also some information in the FontForge help files.
+
+Because there is so much possible, fonts might contain bugs and/or be made to
+work with certain renderers. These renderers may evolve over time, which may have
+the side effect that fonts suddenly behave differently.
+
+After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
+implementation. Of course all errors are mine and of course the code can be
+improved. There are quite some optimizations going on here and processing speed
+is currently acceptable. Not all functions are implemented yet, often because I
+lack the fonts for testing. Many scripts are not yet supported either, but I will
+look into them as soon as users ask for it.
+
+Because there are different interpretations possible, I will extend the code
+with more (configurable) variants. I can also add hooks for users so that they can
+write their own extensions.
+
+Glyphs are indexed not by unicode but in their own way. This is because there is no
+relationship with unicode at all, apart from the fact that a font might cover certain
+ranges of characters. One character can have multiple shapes. However, at the
+TeX end we use Unicode, so all extra glyphs are mapped into a private
+space. This is needed because we need to access them and TeX has to include
+them in the output eventually.
+
+The raw table as it comes from FontForge gets reorganized to fit our needs.
+In ConTeXt that table is packed (similar tables are shared) and cached on disk
+so that successive runs can use the optimized table (after loading the table is
+unpacked). The flattening code used later is a prelude to an even more compact table
+format (and as such it keeps evolving).
+
+This module is sparsely documented because it is a moving target. The table format
+of the reader changes and we experiment a lot with different methods for supporting
+features.
+
+As with the AFM code, we may decide to store more information in the
+OTF table.
+
+Incrementing the version number will force a re-cache. We jump the number by one
+when there's a fix in the FontForge library or Lua code that
+results in different tables.
+--ldx]]--
+
+-- action handler chainproc chainmore comment
+--
+-- gsub_single ok ok ok
+-- gsub_multiple ok ok not implemented yet
+-- gsub_alternate ok ok not implemented yet
+-- gsub_ligature ok ok ok
+-- gsub_context ok --
+-- gsub_contextchain ok --
+-- gsub_reversecontextchain ok --
+-- chainsub -- ok
+-- reversesub -- ok
+-- gpos_mark2base ok ok
+-- gpos_mark2ligature ok ok
+-- gpos_mark2mark ok ok
+-- gpos_cursive ok untested
+-- gpos_single ok ok
+-- gpos_pair ok ok
+-- gpos_context ok --
+-- gpos_contextchain ok --
+--
+-- todo: contextpos and contextsub and class stuff
+--
+-- actions:
+--
+-- handler : actions triggered by lookup
+-- chainproc : actions triggered by contextual lookup
+-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
+--
+-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
+-- remark: we need to check what to do with discretionaries
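+--
+-- as a hedged illustration of the calling convention only (the dispatcher lives
+-- elsewhere and may pass extra arguments), a handler listed above is invoked
+-- roughly like this, where 'lookupdata' stands for whatever the lookup stores
+-- (a replacement, a ligature tree, kerns, anchors):
+--
+-- local handler = handlers[lookuptype] -- e.g. handlers.gsub_single
+-- head, start, done = handler(head,start,kind,lookupname,lookupdata,sequence)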
+
+-- We used to have independent hashes for lookups but as the tags are unique
+-- we now use only one hash. If needed we can have multiple again but in that
+-- case I will probably prefix (i.e. rename) the lookups in the cached font file.
+
+-- Todo: make plugin feature that operates on char/glyphnode arrays
+
+local concat, insert, remove = table.concat, table.insert, table.remove
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
+local random = math.random
+local formatters = string.formatters
+
+local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
+
+local registertracker = trackers.register
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
+local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
+local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
+local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
+local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
+local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
+local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
+local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
+local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
+local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
+local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+
+local report_direct = logs.reporter("fonts","otf direct")
+local report_subchain = logs.reporter("fonts","otf subchain")
+local report_chain = logs.reporter("fonts","otf chain")
+local report_process = logs.reporter("fonts","otf process")
+local report_prepare = logs.reporter("fonts","otf prepare")
+local report_warning = logs.reporter("fonts","otf warning")
+
+registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
+
+registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+
+local insert_node_after = node.insert_after
+local delete_node = nodes.delete
+local copy_node = node.copy
+local find_node_tail = node.tail or node.slide
+local flush_node_list = node.flush_list
+local end_of_math = node.end_of_math
+
+local setmetatableindex = table.setmetatableindex
+
+local zwnj = 0x200C
+local zwj = 0x200D
+local wildcard = "*"
+local default = "dflt"
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local glyphcodes = nodes.glyphcodes
+local disccodes = nodes.disccodes
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
+
+local dir_code = whatcodes.dir
+local localpar_code = whatcodes.localpar
+
+local discretionary_code = disccodes.discretionary
+
+local ligature_code = glyphcodes.ligature
+
+local privateattribute = attributes.private
+
+-- Something is messed up: we have two mark / ligature indices, one at the injection
+-- end and one here ... this is based on KE's patches but there is something fishy
+-- there as I'm pretty sure that for husayni we need some connection (as it's much
+-- more complex than an average font) but I need proper examples of all cases, not
+-- of only some.
+
+local a_state = privateattribute('state')
+local a_markbase = privateattribute('markbase')
+local a_markmark = privateattribute('markmark')
+local a_markdone = privateattribute('markdone') -- assigned at the injection end
+local a_cursbase = privateattribute('cursbase')
+local a_curscurs = privateattribute('curscurs')
+local a_cursdone = privateattribute('cursdone')
+local a_kernpair = privateattribute('kernpair')
+local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
+
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
+
+local markonce = true
+local cursonce = true
+local kernonce = true
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local onetimemessage = fonts.loggers.onetimemessage or function() end
+
+otf.defaultnodealternate = "none" -- first last
+
+-- we share some vars here, after all, we have no nested lookups and less code
+
+local tfmdata = false
+local characters = false
+local descriptions = false
+local resources = false
+local marks = false
+local currentfont = false
+local lookuptable = false
+local anchorlookups = false
+local lookuptypes = false
+local handlers = { }
+local rlmode = 0
+local featurevalue = false
+
+-- head is always a whatsit so we can safely assume that head is not changed
+
+-- we use this for special testing and documentation
+
+local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+
+local function logwarning(...)
+ report_direct(...)
+end
+
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(n) -- currently the same as in font-otb
+ if type(n) == "number" then
+ local description = descriptions[n]
+ local name = description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam = { }, { }
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- later we will start at 2
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return ""
+ end
+end
+
+local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,chainname)
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookupname)
+end
+
+-- We can assume that languages that use marks are not hyphenated. We can also assume
+-- that at most one discretionary is present.
+
+-- We do need components in funny kerning mode but maybe I can better reconstruct them
+-- as we do have the font components info available; removing components makes the
+-- previous code much simpler. Also, later on copying and freeing becomes easier.
+-- However, for arabic we need to keep them around for the sake of mark placement
+-- and indices.
+
+local function copy_glyph(g) -- next and prev are untouched !
+ local components = g.components
+ if components then
+ g.components = nil
+ local n = copy_node(g)
+ g.components = components
+ return n
+ else
+ return copy_node(g)
+ end
+end
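+
+-- hedged usage note (drawn from the helpers below, not an extra rule): a
+-- ligature base is built by copying the start glyph and then reattaching the
+-- original run as its components, roughly:
+--
+-- local base = copy_glyph(start) -- shallow copy, start keeps its components
+-- base.components = start -- the original glyphs become the components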
+
+-- start is a mark and we need to keep that one
+
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start == stop and start.char == char then
+ return head, start
+ else
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
+ local base = copy_glyph(start)
+ if head == start then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start
+ if prev then
+ prev.next = base
+ end
+ if next then
+ next.prev = base
+ end
+ base.next = next
+ base.prev = prev
+ return head, base
+ end
+end
+
+-- The next code is somewhat complicated by the fact that some fonts can have ligatures made
+-- from ligatures that themselves have marks. This was identified by Kai in, for instance,
+-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
+-- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In the next
+-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
+-- third component.
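+--
+-- a hedged worked example of that bookkeeping: the a_ligacomp index assigned
+-- below records which base component a mark belongs to, so the sequence above
+-- evolves roughly as
+--
+-- KAF LAM SHADDA ALEF FATHA -> KAF LAM-ALEF [SHADDA:1] [FATHA:2]
+--                           -> KAF-LAM-ALEF [SHADDA:2] [FATHA:3]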
+
+local function getcomponentindex(start)
+ if start.id ~= glyph_code then
+ return 0
+ elseif start.subtype == ligature_code then
+ local i = 0
+ local components = start.components
+ while components do
+ i = i + getcomponentindex(components)
+ components = components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
+
+-- eventually we will do positioning in another way (needs additional w/h/d fields)
+
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
+ if start == stop and start.char == char then
+ start.char = char
+ return head, start
+ end
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
+ local base = copy_glyph(start)
+ if start == head then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start -- start can have components
+ if prev then
+ prev.next = base
+ end
+ if next then
+ next.prev = base
+ end
+ base.next = next
+ base.prev = prev
+ if not discfound then
+ local deletemarks = markflag ~= "mark"
+ local components = start
+ local baseindex = 0
+ local componentindex = 0
+ local head = base
+ local current = base
+ -- first we loop over the glyphs in start .. stop
+ while start do
+ local char = start.char
+ if not marks[char] then
+ baseindex = baseindex + componentindex
+ componentindex = getcomponentindex(start)
+ elseif not deletemarks then -- quite fishy
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start = start.next
+ end
+ -- we can have one accent as part of a lookup and another following
+ -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari were added)
+ local start = current.next
+ while start and start.id == glyph_code do
+ local char = start.char
+ if marks[char] then
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start = start.next
+ end
+ end
+ return head, base
+end
+
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char = replacement
+ return head, start, true
+end
+
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n = #alternatives
+ if value == "random" then
+ local r = random(1,n)
+ return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value == "first" then
+ return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value == "last" then
+ return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value = tonumber(value)
+ if type(value) ~= "number" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value > n then
+ local defaultalt = otf.defaultnodealternate
+ if defaultalt == "first" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt == "last" then
+ return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value == 0 then
+ return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value < 1 then
+ return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
+
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples = #multiple
+ if nofmultiples > 0 then
+ start.char = multiple[1]
+ if nofmultiples > 1 then
+ local sn = start.next
+ for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and marks[sn.char] do
+-- sn = sn.next
+-- end
+ local n = copy_node(start) -- ignore components
+ n.char = multiple[k]
+ n.next = sn
+ n.prev = start
+ if sn then
+ sn.prev = n
+ end
+ start.next = n
+ start = n
+ end
+ end
+ return head, start, true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ end
+ start.char = choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ end
+ end
+ return head, start, true
+end
+
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s, stop, discfound = start.next, nil, false
+ local startchar = start.char
+ if marks[startchar] then
+ while s do
+ local id = s.id
+ if id == glyph_code and s.font == currentfont and s.subtype<256 then
+ local lg = ligature[s.char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig = ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar = stop.char
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head, start, true
+ else
+ -- ok, goto next lookup
+ end
+ end
+ else
+ local skipmark = sequence.flags[1]
+ while s do
+ local id = s.id
+ if id == glyph_code and s.subtype<256 then
+ if s.font == currentfont then
+ local char = s.char
+ if skipmark and marks[char] then
+ s = s.next
+ else
+ local lg = ligature[char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = s.next
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id == disc_code then
+ discfound = true
+ s = s.next
+ else
+ break
+ end
+ end
+ local lig = ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar = stop.char
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ return head, start, true
+ else
+ -- weird but happens (in some arabic font)
+ start.char = lig
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ return head, start, true
+ end
+ else
+ -- weird but happens
+ end
+ end
+ return head, start, false
+end
+
+--[[ldx--
+We get hits on a mark, but we're not sure if it has to be applied so
+we need to explicitly test for basechar, baselig and basemark entries.
+--ldx]]--
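+
+-- Hedged sketch of the data consulted below (an assumption distilled from the
+-- code, not from the OpenType specification); anchors are matched per anchor
+-- class between the mark and its base:
+--
+-- descriptions[basechar].anchors = {
+--     basechar = { [anchorclass] = ba }, -- mark-to-base attachment points
+--     baselig  = { [anchorclass] = { ba_1, ba_2, ... } }, -- one per component
+--     basemark = { [anchorclass] = ba }, -- mark-to-mark attachment points
+-- }
+-- markanchors = { [anchorclass] = ma } -- the mark's own attachment points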
+
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ -- check chainpos variant
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local index = start[a_ligacomp]
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = base[a_ligacomp]
+ if blc and blc ~= slc then
+ base = base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
+ local alreadydone = cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done = false
+ local startchar = start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = nxt.next
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar = start.char
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head, start, false
+end
+
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
+ -- todo: kerns in components of ligatures
+ local snext = start.next
+ if not snext then
+ return head, start, false
+ else
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ local lookuptype = lookuptypes[lookupname]
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = snext.next
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then -- probably not needed
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else -- wrong ... position has different entries
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ -- local a, b = krn[2], krn[6]
+ -- if a and a ~= 0 then
+ -- local k = setkern(snext,factor,rlmode,a)
+ -- if trace_kerns then
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ -- end
+ -- end
+ -- if b and b ~= 0 then
+ -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
+ -- end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+end
+
+--[[ldx--
+I will implement multiple chain replacements once I run into a font that uses
+them. It's not that complex to handle.
+--ldx]]--
+
+local chainmores = { }
+local chainprocs = { }
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+
+local logwarning = report_subchain
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+
+local logwarning = report_chain
+
+-- We could share functions but that would lead to extra function calls with many
+-- arguments, redundant tests and confusing messages.
+
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+-- The reversesub is a special case, which is why we need to store the replacements
+-- in a bit weird way. There is no lookup and the replacement comes from the lookup
+-- itself. It is meant mostly for dealing with Urdu.
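+--
+-- hedged sketch of the shape used here (based on the lookup just below): the
+-- replacements table maps an input glyph directly to its output glyph,
+--
+-- replacements = { [oldchar] = newchar, ... }
+--
+-- where oldchar and newchar are placeholder names, not actual fields.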
+
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char = start.char
+ local replacement = replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char = replacement
+ return head, start, true
+ else
+ return head, start, false
+ end
+end
+
+--[[ldx--
+This chain stuff is somewhat tricky since we can have a sequence of actions to be
+applied: single, alternate, multiple or ligature where ligature can be an invalid
+one in the sense that it will replace multiple glyphs by one but not necessarily one that
+looks like the combination (i.e. it is the counterpart of multiple then). For
+example, the following is valid:
+
+
+xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
+
+
+Therefore we don't really do the replacement here yet unless we have the
+single lookup case. The efficiency of the replacements can be improved by deleting
+as little as needed but that would also make the code even more messy.
+--ldx]]--
+
+-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
+-- local n = 1
+-- if start == stop then
+-- -- done
+-- elseif ignoremarks then
+-- repeat -- start x x m x x stop => start m
+-- local next = start.next
+-- if not marks[next.char] then
+-- local components = next.components
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- end
+-- n = n + 1
+-- until next == stop
+-- else -- start x x x stop => start
+-- repeat
+-- local next = start.next
+-- local components = next.components
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- n = n + 1
+-- until next == stop
+-- end
+-- return head, n
+-- end
+
+--[[ldx--
+Here we replace start by a single variant. First we delete the rest of the
+match.
+--ldx]]--
+
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ -- todo: marks ?
+ local current = start
+ local subtables = currentlookup.subtables
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id == glyph_code then
+ local currentchar = current.char
+ local lookupname = subtables[1] -- only 1
+ local replacement = lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement = replacement[currentchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char = replacement
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_single = chainprocs.gsub_single
+
+--[[ldx--
+Here we replace start by a sequence of new glyphs. First we delete the rest of
+the match.
+--ldx]]--
+
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ -- local head, n = delete_till_stop(head,start,stop)
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local replacements = lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements = replacements[startchar]
+ if not replacements or replacements == "" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_multiple = chainprocs.gsub_multiple
+
+--[[ldx--
+Here we replace start by a new glyph. First we delete the rest of the match.
+--ldx]]--
+
+-- char_1 mark_1 -> char_x mark_1 (ignore marks)
+-- char_1 mark_1 -> char_x
+
+-- to be checked: do we always have just one glyph?
+-- we can also have alternates for marks
+-- marks come last anyway
+-- are there cases where we need to delete the mark
+
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current = start
+ local subtables = currentlookup.subtables
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id == glyph_code then -- is this check needed?
+ local currentchar = current.char
+ local lookupname = subtables[1]
+ local alternatives = lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives = alternatives[currentchar]
+ if alternatives then
+ local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
+ end
+ start.char = choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_alternate = chainprocs.gsub_alternate
+
+--[[ldx--
+When we replace ligatures we use a helper that handles the marks. I might change
+this function (move code inline and handle the marks by a separate function). We
+assume rather stupid ligatures (no complex disc nodes).
+--ldx]]--
+
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local ligatures = lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures = ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s = start.next
+ local discfound = false
+ local last = stop
+ local nofreplacements = 0
+ local skipmark = currentlookup.flags[1]
+ while s do
+ local id = s.id
+ if id == disc_code then
+ s = s.next
+ discfound = true
+ else
+ local schar = s.char
+ if skipmark and marks[schar] then -- marks
+ s = s.next
+ else
+ local lg = ligatures[schar]
+ if lg then
+ ligatures, last, nofreplacements = lg, s, nofreplacements + 1
+ if s == stop then
+ break
+ else
+ s = s.next
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2 = ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop = last
+ end
+ if trace_ligatures then
+ if start == stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head, start, true, nofreplacements
+ elseif trace_bugs then
+ if start == stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ end
+ end
+ end
+ end
+ return head, start, false, 0
+end
+
+chainmores.gsub_ligature = chainprocs.gsub_ligature
+
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head, start, false
+ end
+ end
+ end
+ -- todo: like marks a ligatures hash
+ local index = start[a_ligacomp]
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ -- local alreadydone = markonce and start[a_markmark]
+ -- if not alreadydone then
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = base[a_ligacomp]
+ if blc and blc ~= slc then
+ base = base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ -- elseif trace_marks and trace_details then
+ -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
+ -- end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone = cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local exitanchors = lookuphash[lookupname]
+ if exitanchors then
+ exitanchors = exitanchors[startchar]
+ end
+ if exitanchors then
+ local done = false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = nxt.next
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head, start, false
+ end
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ -- untested .. needs checking for the new model
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar] -- needed ?
+ if kerns then
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_single = chainprocs.gpos_single -- okay?
+
+-- when machines become faster i will make a shared function
+
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext = start.next
+ if snext then
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar]
+ if kerns then
+ local lookuptype = lookuptypes[lookupname]
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = snext.next
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a, b = krn[2], krn[6]
+ if a and a ~= 0 then
+ local k = setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b ~= 0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
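
A minimal sketch, with made-up lookup names and glyph slots, of the kern data that gpos_single/gpos_pair read from lookuphash (compare the 'pair' action and the kerns preparation further down); the {x, y, w, h} ordering of the value records is an assumption based on how they are fed to setpair:

    local lookuphash = {
        -- plain kerning: lookuphash[lookup][first][second] = kern amount (a number)
        hypothetical_kern_lookup = {
            [0x0041] = { [0x0056] = -80 },                      -- 'A' followed by 'V'
        },
        -- pair positioning: krn[1] = paired glyph, krn[2]/krn[3] = the two value records
        hypothetical_pair_lookup = {
            [0x0041] = {
                [0x0056] = { 0x0056, { 0, 0, -80, 0 }, false }, -- assumed x, y, w, h deltas
            },
        },
    }
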
+
+-- what pointer to return, spec says stop
+-- to be discussed ... is bidi changer a space?
+-- elseif char == zwnj and sequence[n][32] then -- brrr
+
+-- somehow l or f is global
+-- we don't need to pass the currentcontext, saves a bit
+-- make a slow variant that can be activated but with more tracing
+
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
+ local flags = sequence.flags
+ local done = false
+ local skipmark = flags[1]
+ local skipligature = flags[2]
+ local skipbase = flags[3]
+ local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
+ local markclass = sequence.markclass -- todo, first we need a proper test
+ local skipped = false
+ for k=1,#contexts do
+ local match = true
+ local current = start
+ local last = start
+ local ck = contexts[k]
+ local seq = ck[3]
+ local s = #seq
+ -- f..l = mid string
+ if s == 1 then
+ -- never happens
+ match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
+ else
+ -- maybe we need a better space check (maybe check for glue or category or combination)
+ -- we cannot optimize for n=2 because there can be disc nodes
+ local f, l = ck[4], ck[5]
+ -- current match
+ if f == 1 and f == l then -- current only
+ -- already a hit
+ -- match = true
+ else -- before/current/after | before/current | current/after
+ -- no need to test first hit (to be optimized)
+ if f == l then -- new, else last out of sync (f is > 1)
+ -- match = true
+ else
+ local n = f + 1
+ last = last.next
+ while n <= l do
+ if last then
+ local id = last.id
+ if id == glyph_code then
+ if last.font == currentfont and last.subtype<256 then
+ local char = last.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last = last.next
+ elseif seq[n][char] then
+ if n < l then
+ last = last.next
+ end
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ last = last.next
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- before
+ if match and f > 1 then
+ local prev = start.prev
+ if prev then
+ local n = f-1
+ while n >= 1 do
+ if prev then
+ local id = prev.id
+ if id == glyph_code then
+ if prev.font == currentfont and prev.subtype<256 then -- normal char
+ local char = prev.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip it
+ elseif seq[n][32] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ prev = prev.prev
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
+ n = n -1
+ else
+ match = false
+ break
+ end
+ end
+ elseif f == 2 then
+ match = seq[1][32]
+ else
+ for n=f-1,1,-1 do
+ if not seq[n][32] then
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- after
+ if match and s > l then
+ local current = last and last.next
+ if current then
+ -- removed optimization for s-l == 1, we have to deal with marks anyway
+ local n = l + 1
+ while n <= s do
+ if current then
+ local id = current.id
+ if id == glyph_code then
+ if current.font == currentfont and current.subtype<256 then -- normal char
+ local char = current.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip it
+ elseif seq[n][32] then -- brrr
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ current = current.next
+ elseif seq[n][32] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ end
+ elseif s-l == 1 then
+ match = seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match = false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ -- ck == currentcontext
+ if trace_contexts then
+ local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
+ local char = start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups = ck[6]
+ if chainlookups then
+ local nofchainlookups = #chainlookups
+ -- we can speed this up if needed
+ if nofchainlookups == 1 then
+ local chainlookupname = chainlookups[1]
+ local chainlookup = lookuptable[chainlookupname]
+ if chainlookup then
+ local cp = chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done = true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else -- shouldn't happen
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i = 1
+ repeat
+ if skipped then
+ while true do
+ local char = start.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ start = start.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname = chainlookups[i]
+ local chainlookup = lookuptable[chainlookupname]
+ if not chainlookup then
+ -- okay, n matches, < n replacements
+ i = i + 1
+ else
+ local cp = chainmores[chainlookup.type]
+ if not cp then
+ -- actually an error
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i = i + 1
+ else
+ local ok, n
+ head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ -- messy since last can be changed !
+ if ok then
+ done = true
+ -- skip next one(s) if ligature
+ i = i + (n or 1)
+ else
+ i = i + 1
+ end
+ end
+ end
+ if start then
+ start = start.next
+ else
+ -- weird
+ end
+ until i > nofchainlookups
+ end
+ else
+ local replacements = ck[7]
+ if replacements then
+ head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
+ else
+ done = true -- can be meant to be skipped
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head, start, done
+end
+
+-- Because we want to keep this elsewhere (and because speed is less of an issue) we
+-- pass the font id so that the verbose variant can access the relevant helper tables.
+
+local verbose_handle_contextchain = function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+
+otf.chainhandlers = {
+ normal = normal_handle_contextchain,
+ verbose = verbose_handle_contextchain,
+}
+
+function otf.setcontextchain(method)
+ if not method or method == "normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then -- no need for a message while making the format
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain = normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler = otf.chainhandlers[method]
+ handlers.contextchain = function(...)
+ return handler(currentfont,...) -- hm, get rid of ...
+ end
+ end
+ handlers.gsub_context = handlers.contextchain
+ handlers.gsub_contextchain = handlers.contextchain
+ handlers.gsub_reversecontextchain = handlers.contextchain
+ handlers.gpos_contextchain = handlers.contextchain
+ handlers.gpos_context = handlers.contextchain
+end
+
+otf.setcontextchain()
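
A rough sketch of how the chainhandlers table above can be extended; the handler name and the tracing it does are invented, the only given being that setcontextchain wraps non-normal handlers so that they receive the current font id as their first argument:

    local normal = otf.chainhandlers.normal

    otf.chainhandlers.slow = function(font,head,start,kind,chainname,contexts,sequence,lookuphash)
        -- room for per-font/per-chain tracing; here we just report and delegate
        texio.write_nl("contextchain " .. tostring(chainname) .. " in font " .. tostring(font))
        return normal(head,start,kind,chainname,contexts,sequence,lookuphash)
    end

    otf.setcontextchain("slow") -- installs the wrapper that prepends currentfont
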
+
+local missing = { } -- we only report once
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+
+local logwarning = report_process
+
+local function report_missing_cache(typ,lookup)
+ local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
+ local t = f[typ] if not t then t = { } f[typ] = t end
+ if not t[lookup] then
+ t[lookup] = true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+
+local resolved = { } -- we only resolve a font,script,language pair once
+
+-- todo: pass all these 'locals' in a table
+
+local lookuphashes = { }
+
+setmetatableindex(lookuphashes, function(t,font)
+ local lookuphash = fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash = false
+ end
+ t[font] = lookuphash
+ return lookuphash
+end)
+
+-- fonts.hashes.lookups = lookuphashes
+
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local function initialize(sequence,script,language,enabled)
+ local features = sequence.features
+ if features then
+ local order = sequence.order
+ if order then
+ for i=1,#order do
+ local kind = order[i] --
+ local valid = enabled[kind]
+ if valid then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ end
+ end
+ end
+ else
+ -- can't happen
+ end
+ end
+ return false
+end
+
+function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
+ local shared = tfmdata.shared
+ local properties = tfmdata.properties
+ local language = properties.language or "dflt"
+ local script = properties.script or "dflt"
+ local enabled = shared.features
+ local res = resolved[font]
+ if not res then
+ res = { }
+ resolved[font] = res
+ end
+ local rs = res[script]
+ if not rs then
+ rs = { }
+ res[script] = rs
+ end
+ local rl = rs[language]
+ if not rl then
+ rl = {
+ -- indexed but we can also add specific data by key
+ }
+ rs[language] = rl
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
+ end
+ return rl
+end
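
Each entry returned here is the five-slot record built by initialize above; featuresprocessor below unpacks it by index. A minimal sketch, assuming tfmdata and font are in scope as they are inside that function:

    local datasets = otf.dataset(tfmdata,font)
    for s=1,#datasets do
        local dataset  = datasets[s]
        local value    = dataset[1] -- the enabled feature value (enabled[kind])
        local analyzed = dataset[2] -- autofeatures[kind] or false (analyzer state to match)
        local chain    = dataset[3] -- sequence.chain or 0
        local kind     = dataset[4] -- the feature name, for instance "liga" or "kern"
        local sequence = dataset[5] -- the sequence record itself
    end
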
+
+-- elseif id == glue_code then
+-- if p[5] then -- chain
+-- local pc = pp[32]
+-- if pc then
+-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
+-- if ok then
+-- done = true
+-- end
+-- if start then start = start.next end
+-- else
+-- start = start.next
+-- end
+-- else
+-- start = start.next
+-- end
+
+-- there will be a new direction parser (pre-parsed etc)
+
+-- less bytecode: 290 -> 254
+--
+-- attr = attr or false
+--
+-- local a = getattr(start,0)
+-- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then
+-- -- the action
+-- end
+
+local function featuresprocessor(head,font,attr)
+
+ local lookuphash = lookuphashes[font] -- we can also check sequences here
+
+ if not lookuphash then
+ return head, false
+ end
+
+ if trace_steps then
+ checkstep(head)
+ end
+
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions
+ characters = tfmdata.characters
+ resources = tfmdata.resources
+
+ marks = resources.marks
+ anchorlookups = resources.lookup_to_anchor
+ lookuptable = resources.lookups
+ lookuptypes = resources.lookuptypes
+
+ currentfont = font
+ rlmode = 0
+
+ local sequences = resources.sequences
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
+
+ local dirstack = { } -- could move outside function
+
+ -- We could work on sub start-stop ranges instead but I wonder if there is that
+ -- much speed gain (experiments showed that it did not make much sense) and we need
+ -- to keep track of directions anyway. Also at some point I want to play with
+ -- font interactions and then we do need the full sweeps.
+
+ -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
+ -- so that multiple cases are also covered.)
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+
+ local sequence = dataset[5] -- the sequence record stored by initialize (one of resources.sequences)
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ local typ = sequence.type
+ local subtables = sequence.subtables
+ if chain < 0 then
+ -- this is a limited case, no special treatments like 'init' etc
+ local handler = handlers[typ]
+ -- we need to get rid of this slide! probably no longer needed in latest luatex
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.prev end
+ else
+ start = start.prev
+ end
+ else
+ start = start.prev
+ end
+ else
+ start = start.prev
+ end
+ end
+ else
+ local handler = handlers[typ]
+ local ns = #subtables
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
+ if ns == 1 then -- happens often
+ local lookupname = subtables[1]
+ local lookupcache = lookuphash[lookupname]
+ if not lookupcache then -- also check for empty cache
+ report_missing_cache(typ,lookupname)
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- sequence can go
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = disc.prev
+ local next = disc.next
+ if prev and next then
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
+ if a then
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ else
+ a = not attribute or prev[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[prev.char]
+ if lookupmatch then
+ -- sequence can go
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ prev.next = disc
+ -- next.prev = disc
+ end
+ return next
+ end
+
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- sequence can go
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success = true
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if start.subtype == discretionary_code then
+ local pre = start.pre
+ if pre then
+ local new = subrun(pre)
+ if new then start.pre = new end
+ end
+ local post = start.post
+ if post then
+ local new = subrun(post)
+ if new then start.post = new end
+ end
+ local replace = start.replace
+ if replace then
+ local new = subrun(replace)
+ if new then start.replace = new end
+ end
+ elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = start.next
+ elseif id == whatsit_code then -- will be function
+ local subtype = start.subtype
+ if subtype == dir_code then
+ local dir = start.dir
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = start.dir
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ -- one might wonder if the par dir should be looked at, so we might as well drop the next line
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
+ else
+ start = start.next
+ end
+ end
+ end
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = disc.prev
+ local next = disc.next
+ if prev and next then
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
+ if a then
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ else
+ a = not attribute or prev[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[prev.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ prev.next = disc
+ -- next.prev = disc
+ end
+ return next
+ end
+
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if start.subtype == discretionary_code then
+ local pre = start.pre
+ if pre then
+ local new = subrun(pre)
+ if new then start.pre = new end
+ end
+ local post = start.post
+ if post then
+ local new = subrun(post)
+ if new then start.post = new end
+ end
+ local replace = start.replace
+ if replace then
+ local new = subrun(replace)
+ if new then start.replace = new end
+ end
+ elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = start.next
+ elseif id == whatsit_code then
+ local subtype = start.subtype
+ if subtype == dir_code then
+ local dir = start.dir
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = start.dir
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
+ else
+ start = start.next
+ end
+ end
+ end
+ end
+ if success then
+ done = true
+ end
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+ end
+ return head, done
+end
+
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if target then
+ target[unicode] = lookupdata
+ else
+ lookuphash[lookupname] = { [unicode] = lookupdata }
+ end
+end
+
+local action = {
+
+ substitution = generic,
+ multiple = generic,
+ alternate = generic,
+ position = generic,
+
+ ligature = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ for i=1,#lookupdata do
+ local li = lookupdata[i]
+ local tu = target[li]
+ if not tu then
+ tu = { }
+ target[li] = tu
+ end
+ target = tu
+ end
+ target.ligature = unicode
+ end,
+
+ pair = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ local others = target[unicode]
+ local paired = lookupdata[1]
+ if others then
+ others[paired] = lookupdata
+ else
+ others = { [paired] = lookupdata }
+ target[unicode] = others
+ end
+ end,
+
+}
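
The ligature action above builds a small trie per lookup: one nested table per component glyph, with the resulting glyph stored in the ligature field of the last node. A sketch of what ends up in the lookup table for an f-f-i ligature; the lookup name is made up and the slots assume the components and the ligature sit on their Unicode code points:

    -- lookuphash["hypothetical_liga_lookup"] after feeding components { 0x66, 0x66, 0x69 }
    -- for the glyph 0xFB03 (LATIN SMALL LIGATURE FFI):
    local target = {
        [0x66] = {                     -- first 'f'
            [0x66] = {                 -- second 'f'
                [0x69] = {             -- 'i'
                    ligature = 0xFB03, -- the glyph that replaces the three components
                },
            },
        },
    }
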
+
+local function prepare_lookups(tfmdata)
+
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local anchor_to_lookup = resources.anchor_to_lookup
+ local lookup_to_anchor = resources.lookup_to_anchor
+ local lookuptypes = resources.lookuptypes
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+
+ -- we cannot free the entries in the descriptions as sometimes we access
+ -- them directly (for instance anchors) ... selectively freeing doesn't save
+ -- much memory as it's only a reference to a table and the slot in the
+ -- description hash is not freed anyway
+
+ for unicode, character in next, characters do -- we cannot loop over descriptions !
+
+ local description = descriptions[unicode]
+
+ if description then
+
+ local lookups = description.slookups
+ if lookups then
+ for lookupname, lookupdata in next, lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+
+ local lookups = description.mlookups
+ if lookups then
+ for lookupname, lookuplist in next, lookups do
+ local lookuptype = lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata = lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+
+ local list = description.kerns
+ if list then
+ for lookup, krn in next, list do -- ref to glyph, saves lookup
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = krn
+ else
+ lookuphash[lookup] = { [unicode] = krn }
+ end
+ end
+ end
+
+ local list = description.anchors
+ if list then
+ for typ, anchors in next, list do -- types
+ if typ == "mark" or typ == "cexit" then -- or entry?
+ for name, anchor in next, anchors do
+ local lookups = anchor_to_lookup[name]
+ if lookups then
+ for lookup, _ in next, lookups do
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = anchors
+ else
+ lookuphash[lookup] = { [unicode] = anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ end
+
+ end
+
+end
+
+local function split(replacement,original)
+ local result = { }
+ for i=1,#replacement do
+ result[original[i]] = replacement[i]
+ end
+ return result
+end
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true, contextsub = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
+
+local function prepare_contextchains(tfmdata)
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local lookups = rawdata.lookups
+ if lookups then
+ for lookupname, lookupdata in next, rawdata.lookups do
+ local lookuptype = lookupdata.type
+ if lookuptype then
+ local rules = lookupdata.rules
+ if rules then
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but I need to see what use it has)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
+ end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+ -- Eventually we can store start, stop and sequence in the cached file
+ -- but then less sharing takes place, so it's best not to do that without a lot
+ -- of profiling; let's forget about it for now.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
+ end
+ end
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
+ end
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
+ end
+ end
+ if sequence[1] then
+ -- Replacements only happen with reverse lookups as they are single only. We
+ -- could pack them into current (replacement value instead of true) and then
+ -- use sequence[start] instead but it's somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
+ end
+ end
+ end
+ end
+ end
+ else
+ -- no rules
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
+ end
+ end
+ end
+end
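
Each rule stored here is the seven-slot array built above and read back as ck[1]..ck[7] in normal_handle_contextchain; contexts is then indexed by every glyph covered by the first 'current' slot. A small sketch with placeholder covers and an invented lookup name:

    local ck = {
        1,                                        -- [1] rule number
        "chainsub",                               -- [2] lookuptype
        { { [0x66] = true }, { [0x69] = true } }, -- [3] before + current + after covers
        2,                                        -- [4] start: 'current' begins at slot 2
        2,                                        -- [5] stop:  'current' ends at slot 2
        { "hypothetical_sub_lookup" },            -- [6] rule.lookups, applied on a match
        false,                                    -- [7] replacements (reverse lookups only)
    }
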
+
+-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ -- beware we need to use the topmost properties table
+ local rawdata = tfmdata.shared.rawdata
+ local properties = rawdata.properties
+ if not properties.initialized then
+ local starttime = trace_preparing and os.clock()
+ local resources = rawdata.resources
+ resources.lookuphash = resources.lookuphash or { }
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized = true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ position = 1,
+ node = featuresinitializer,
+ },
+ processors = {
+ node = featuresprocessor,
+ }
+}
+
+-- This can be used for extra handlers, but should be used with care!
+
+otf.handlers = handlers
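
Since the handlers table is exported, an extra step handler can in principle be hooked in from outside. The lookup type name below is invented; the signature simply mirrors how featuresprocessor calls handlers and expects head, start and a success flag back:

    otf.handlers.gsub_custom = function(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
        -- inspect lookupmatch and rebuild nodes here; this stub only reports and passes through
        texio.write_nl("gsub_custom: " .. tostring(lookupname) .. " at char " .. tostring(start.char))
        return head, start, false -- false: nothing was changed
    end
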
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua
index ea6e3cab5ee..f03d558bfb4 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts-syn.lua
@@ -100,3 +100,7 @@ fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv
return ""
end
+
+function fonts.names.ignoredfile(filename) -- only supported in mkiv
+ return false -- will be overloaded
+end
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
index 89592fcacf8..5e5c9a4cfa6 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.lua
@@ -192,7 +192,7 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
-- with context. The mtx-fonts script can be used to generate this file (using the --names option).
-- In 2013/14 I will merge/move some generic files into luatex-fonts-* files (copies) so that
- -- intermediate updates of context not interfere. We can then also use the general merger and
+ -- intermediate updates of context don't interfere. We can then also use the general merger and
-- consider stripping debug code.
loadmodule('font-ini.lua')
@@ -201,15 +201,19 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-cid.lua')
loadmodule('font-map.lua') -- for loading lum file (will be stripped)
loadmodule('luatex-fonts-syn.lua') -- deals with font names (synonyms)
+ -- begin of test
+ loadmodule('font-tfm.lua') -- optional
+ loadmodule('font-afm.lua') -- optional
+ loadmodule('font-afk.lua') -- optional
+ -- end of test
loadmodule('luatex-fonts-tfm.lua')
loadmodule('font-oti.lua')
loadmodule('font-otf.lua')
loadmodule('font-otb.lua')
- loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
+ loadmodule('luatex-fonts-inj.lua') -- will be replaced (luatex >= .80)
loadmodule('font-ota.lua')
- loadmodule('font-otn.lua')
+ loadmodule('luatex-fonts-otn.lua')
loadmodule('font-otp.lua') -- optional
- ----------('luatex-fonts-chr.lua')
loadmodule('luatex-fonts-lua.lua')
loadmodule('font-def.lua')
loadmodule('luatex-fonts-def.lua')
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex
index a7c8bc2b8fd..7b457e9b4fc 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-fonts.tex
@@ -132,7 +132,9 @@
%D and the \CONTEXT\ wiki.
\directlua {
- dofile(kpse.find_file("luatex-fonts.lua","tex"))
+ if not fonts then
+ dofile(kpse.find_file("luatex-fonts.lua","tex"))
+ end
}
\endinput
diff --git a/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex b/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
index fcc837e7041..fbf8ce3cf2c 100644
--- a/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
+++ b/Master/texmf-dist/tex/generic/context/luatex/luatex-test.tex
@@ -80,4 +80,8 @@ $$\left( { {1} \over { {1} \over {x} } } \right) $$
$$\sqrt {2} { { {1} \over { {1} \over {x} } } } $$
+\font\cows=file:koeieletters.afm at 50pt
+
+\cows Hello World!
+
\end