path: root/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
author     Taco Hoekwater <taco@elvenkind.com>  2010-05-24 14:05:02 +0000
committer  Taco Hoekwater <taco@elvenkind.com>  2010-05-24 14:05:02 +0000
commit     57ea7dad48fbf2541c04e434c31bde655ada3ac4 (patch)
tree       1f8b43bc7cb92939271e1f5bec610710be69097f /Master/texmf-dist/scripts/context/stubs/unix/mtxrun
parent     6ee41e1f1822657f7f23231ec56c0272de3855e3 (diff)
here is context 2010.05.24 13:05
git-svn-id: svn://tug.org/texlive/trunk@18445 c570f23f-e606-0410-a88d-b1316a301751
Diffstat (limited to 'Master/texmf-dist/scripts/context/stubs/unix/mtxrun')
-rw-r--r--  Master/texmf-dist/scripts/context/stubs/unix/mtxrun  7371
1 files changed, 4749 insertions, 2622 deletions
diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
index 82d1edecbc5..b99327692d7 100644
--- a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
+++ b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
@@ -48,13 +48,16 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-string'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local sub, gsub, find, match, gmatch, format, char, byte, rep = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep
+local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
+local lpegmatch = lpeg.match
+
+-- some functions may disappear as they are not used anywhere
if not string.split then
@@ -94,8 +97,16 @@ function string:unquote()
return (gsub(self,"^([\"\'])(.*)%1$","%2"))
end
+--~ function string:unquote()
+--~ if find(self,"^[\'\"]") then
+--~ return sub(self,2,-2)
+--~ else
+--~ return self
+--~ end
+--~ end
+
function string:quote() -- we could use format("%q")
- return '"' .. self:unquote() .. '"'
+ return format("%q",self)
end
function string:count(pattern) -- variant 3
@@ -115,12 +126,23 @@ function string:limit(n,sentinel)
end
end
-function string:strip()
- return (gsub(self,"^%s*(.-)%s*$", "%1"))
+--~ function string:strip() -- the .- is quite efficient
+--~ -- return match(self,"^%s*(.-)%s*$") or ""
+--~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list
+--~ return find(self,'^%s*$') and '' or match(self,'^%s*(.*%S)')
+--~ end
+
+do -- roberto's variant:
+ local space = lpeg.S(" \t\v\n")
+ local nospace = 1 - space
+ local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0)
+ function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+ end
end
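-- editor's sketch, not part of this changeset: the lpeg stripper drops leading and
-- trailing whitespace but keeps inner runs intact.
--~ print(string.strip("  a  b  ")) -- "a  b"
--~ print(string.strip(" \t\n "))   -- ""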
function string:is_empty()
- return not find(find,"%S")
+ return not find(self,"%S")
end
function string:enhance(pattern,action)
@@ -154,14 +176,14 @@ if not string.characters then
local function nextchar(str, index)
index = index + 1
- return (index <= #str) and index or nil, str:sub(index,index)
+ return (index <= #str) and index or nil, sub(str,index,index)
end
function string:characters()
return nextchar, self, 0
end
local function nextbyte(str, index)
index = index + 1
- return (index <= #str) and index or nil, byte(str:sub(index,index))
+ return (index <= #str) and index or nil, byte(sub(str,index,index))
end
function string:bytes()
return nextbyte, self, 0
@@ -174,7 +196,7 @@ end
function string:rpadd(n,chr)
local m = n-#self
if m > 0 then
- return self .. self.rep(chr or " ",m)
+ return self .. rep(chr or " ",m)
else
return self
end
@@ -183,7 +205,7 @@ end
function string:lpadd(n,chr)
local m = n-#self
if m > 0 then
- return self.rep(chr or " ",m) .. self
+ return rep(chr or " ",m) .. self
else
return self
end
@@ -231,6 +253,17 @@ function string:pattesc()
return (gsub(self,".",patterns_escapes))
end
+local simple_escapes = {
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+function string:simpleesc()
+ return (gsub(self,".",simple_escapes))
+end
+
function string:tohash()
local t = { }
for s in gmatch(self,"([^, ]+)") do -- lpeg
@@ -242,10 +275,10 @@ end
local pattern = lpeg.Ct(lpeg.C(1)^0)
function string:totable()
- return pattern:match(self)
+ return lpegmatch(pattern,self)
end
---~ for _, str in ipairs {
+--~ local t = {
--~ "1234567123456712345671234567",
--~ "a\tb\tc",
--~ "aa\tbb\tcc",
@@ -253,7 +286,10 @@ end
--~ "aaaa\tbbbb\tcccc",
--~ "aaaaa\tbbbbb\tccccc",
--~ "aaaaaa\tbbbbbb\tcccccc",
---~ } do print(string.tabtospace(str)) end
+--~ }
+--~ for k=1,#t do
+--~ print(string.tabtospace(t[k]))
+--~ end
function string.tabtospace(str,tab)
-- we don't handle embedded newlines
@@ -261,7 +297,7 @@ function string.tabtospace(str,tab)
local s = find(str,"\t")
if s then
if not tab then tab = 7 end -- only when found
- local d = tab-(s-1)%tab
+ local d = tab-(s-1) % tab
if d > 0 then
str = gsub(str,"\t",rep(" ",d),1)
else
@@ -280,6 +316,25 @@ function string:compactlong() -- strips newlines and leading spaces
return self
end
+function string:striplong() -- strips newlines and leading spaces
+ self = gsub(self,"^%s*","")
+ self = gsub(self,"[\n\r]+ *","\n")
+ return self
+end
+
+function string:topattern(lowercase,strict)
+ if lowercase then
+ self = lower(self)
+ end
+ self = gsub(self,".",simple_escapes)
+ if self == "" then
+ self = ".*"
+ elseif strict then
+ self = "^" .. self .. "$"
+ end
+ return self
+end
+
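-- editor's sketch, not part of this changeset: topattern turns a shell-like wildcard
-- into a lua pattern, optionally lowercased and/or anchored when strict is set.
--~ print(("Foo-*.mkiv"):topattern(true))     -- foo%-.*%.mkiv
--~ print(("foo?.tex"):topattern(false,true)) -- ^foo.%.tex$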
end -- of closure
@@ -287,58 +342,64 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-lpeg'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-
---~ l-lpeg.lua :
-
---~ lpeg.digit = lpeg.R('09')^1
---~ lpeg.sign = lpeg.S('+-')^1
---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1)
---~ lpeg.number = lpeg.float + lpeg.integer
---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1
---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1
---~ lpeg.uppercase = lpeg.P("AZ")
---~ lpeg.lowercase = lpeg.P("az")
-
---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed
---~ lpeg.space = lpeg.S(' ')^1
---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1
---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1
---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1
-
-local hash = { }
+local lpeg = require("lpeg")
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V
+local match = lpeg.match
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local utf8byte = R("\128\191")
+
+patterns.utf8byte = utf8byte
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8byte
+patterns.utf8three = R("\224\239") * utf8byte * utf8byte
+patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = S(" ")
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = crlf + cr + lf
+patterns.nonspace = 1 - patterns.space
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
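-- editor's sketch, not part of this changeset: the shared patterns can be combined,
-- for instance to count utf-8 characters.
--~ local utfchar = lpeg.patterns.utf8
--~ local counter = lpeg.Ct(lpeg.C(utfchar)^0)
--~ print(#lpeg.match(counter,"åäö")) -- 3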
function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * lpeg.V(1) }
-end
-
-function lpeg.startswith(pattern) --slightly adapted
- return P(pattern)
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
end
function lpeg.splitter(pattern, action)
return (((1-P(pattern))^1)/action+1)^0
end
--- variant:
-
---~ local parser = lpeg.Ct(lpeg.splitat(newline))
-
-local crlf = P("\r\n")
-local cr = P("\r")
-local lf = P("\n")
-local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-local newline = crlf + cr + lf
-local spacing = space^0 * newline
-
+local spacing = patterns.spacer^0 * patterns.newline -- sort of strip
local empty = spacing * Cc("")
local nonempty = Cs((1-spacing)^1) * spacing^-1
local content = (empty + nonempty)^1
@@ -346,15 +407,15 @@ local content = (empty + nonempty)^1
local capture = Ct(content^0)
function string:splitlines()
- return capture:match(self)
+ return match(capture,self)
end
-lpeg.linebyline = content -- better make a sublibrary
+patterns.textline = content
---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more
---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more
---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps
---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps
+--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
+--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
+--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps
+--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps
local splitters_s, splitters_m = { }, { }
@@ -364,7 +425,7 @@ local function splitat(separator,single)
separator = P(separator)
if single then
local other, any = C((1 - separator)^0), P(1)
- splitter = other * (separator * C(any^0) + "")
+ splitter = other * (separator * C(any^0) + "") -- ?
splitters_s[separator] = splitter
else
local other = C((1 - separator)^0)
@@ -379,15 +440,72 @@ lpeg.splitat = splitat
local cache = { }
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = Ct(splitat(separator))
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
function string:split(separator)
local c = cache[separator]
if not c then
c = Ct(splitat(separator))
cache[separator] = c
end
- return c:match(self)
+ return match(c,self)
+end
+
+lpeg.splitters = cache
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string:checkedsplit(separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,self)
end
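-- editor's sketch, not part of this changeset: split keeps empty fields while
-- checkedsplit skips separators at the edges and collapses runs of them.
--~ print(table.concat(("a,,b"):split(","),"|"))        -- a||b
--~ print(table.concat(("a,,b"):checkedsplit(","),"|")) -- a|b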
+--~ function lpeg.append(list,pp)
+--~ local p = pp
+--~ for l=1,#list do
+--~ if p then
+--~ p = p + P(list[l])
+--~ else
+--~ p = P(list[l])
+--~ end
+--~ end
+--~ return p
+--~ end
+
+--~ from roberto's site:
+
+local f1 = string.byte
+
+local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
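-- editor's sketch, not part of this changeset: the redefined utf8byte pattern maps
-- one utf-8 sequence onto its code point.
--~ print(lpeg.match(lpeg.patterns.utf8byte,"A")) -- 65
--~ print(lpeg.match(lpeg.patterns.utf8byte,"é")) -- 233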
+
end -- of closure
@@ -395,7 +513,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -404,9 +522,58 @@ if not modules then modules = { } end modules ['l-table'] = {
table.join = table.concat
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump
+local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
-local type, next, tostring, ipairs = type, next, tostring, ipairs
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+
+-- Starting with version 5.2 Lua no longer provides ipairs, which makes
+-- sense. As we already used the for loop and # in most places the
+-- impact on ConTeXt was not that large; the remaining ipairs already
+-- have been replaced. In a similar fashion we also hardly used pairs.
+--
+-- Just in case, we provide the fallbacks as discussed in Programming
+-- in Lua (http://www.lua.org/pil/7.3.html):
+
+if not ipairs then
+
+ -- for k, v in ipairs(t) do ... end
+ -- for k=1,#t do local v = t[k] ... end
+
+ local function iterate(a,i)
+ i = i + 1
+ local v = a[i]
+ if v ~= nil then
+ return i, v --, nil
+ end
+ end
+
+ function ipairs(a)
+ return iterate, a, 0
+ end
+
+end
+
+if not pairs then
+
+ -- for k, v in pairs(t) do ... end
+ -- for k, v in next, t do ... end
+
+ function pairs(t)
+ return next, t -- , nil
+ end
+
+end
+
+-- Also, unpack has been moved to the table table, and for compatibility
+-- reasons we provide both now.
+
+if not table.unpack then
+ table.unpack = _G.unpack
+elseif not unpack then
+ _G.unpack = table.unpack
+end
+
+-- extra functions, some might go (when not used)
function table.strip(tab)
local lst = { }
@@ -421,6 +588,14 @@ function table.strip(tab)
return lst
end
+function table.keys(t)
+ local k = { }
+ for key, _ in next, t do
+ k[#k+1] = key
+ end
+ return k
+end
+
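-- editor's sketch, not part of this changeset: keys collects the keys of a hash in
-- no particular order.
--~ print(#table.keys({ a = 1, b = 2, c = 3 })) -- 3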
local function compare(a,b)
return (tostring(a) < tostring(b))
end
@@ -464,7 +639,7 @@ end
table.sortedkeys = sortedkeys
table.sortedhashkeys = sortedhashkeys
-function table.sortedpairs(t)
+function table.sortedhash(t)
local s = sortedhashkeys(t) -- maybe just sortedkeys
local n = 0
local function kv(s)
@@ -475,6 +650,8 @@ function table.sortedpairs(t)
return kv, s
end
+table.sortedpairs = table.sortedhash
+
function table.append(t, list)
for _,v in next, list do
insert(t,v)
@@ -583,7 +760,7 @@ end
table.fastcopy = fastcopy
table.copy = copy
--- rougly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
+-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
function table.sub(t,i,j)
return { unpack(t,i,j) }
@@ -597,18 +774,18 @@ end
-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-function table.is_empty(t)
+function table.is_empty(t) -- obsolete, use inline code instead
return not t or not next(t)
end
-function table.one_entry(t)
+function table.one_entry(t) -- obsolete, use inline code instead
local n = next(t)
return n and not next(t,n)
end
-function table.starts_at(t)
- return ipairs(t,1)(t,0)
-end
+--~ function table.starts_at(t) -- obsolete, not nice anyway
+--~ return ipairs(t,1)(t,0)
+--~ end
function table.tohash(t,value)
local h = { }
@@ -686,6 +863,8 @@ end
--
-- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
+-- problem: there is no good number_to_string converter with the best resolution
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -708,8 +887,9 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s{",depth))
end
end
+ -- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here
+ local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
if compact then
-- NOT: for k=1,#root do (we need to quit at nil)
for k,v in ipairs(root) do -- can we use next?
@@ -730,10 +910,10 @@ local function do_serialize(root,name,depth,level,indexed)
if hexify then
handle(format("%s 0x%04X,",depth,v))
else
- handle(format("%s %s,",depth,v))
+ handle(format("%s %s,",depth,v)) -- %.99g
end
elseif t == "string" then
- if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then
+ if reduce and tonumber(v) then
handle(format("%s %s,",depth,v))
else
handle(format("%s %q,",depth,v))
@@ -770,29 +950,29 @@ local function do_serialize(root,name,depth,level,indexed)
--~ if hexify then
--~ handle(format("%s %s=0x%04X,",depth,key(k),v))
--~ else
- --~ handle(format("%s %s=%s,",depth,key(k),v))
+ --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g
--~ end
if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
else
- handle(format("%s [%s]=%s,",depth,k,v))
+ handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
end
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
if hexify then
handle(format("%s %s=0x%04X,",depth,k,v))
else
- handle(format("%s %s=%s,",depth,k,v))
+ handle(format("%s %s=%s,",depth,k,v)) -- %.99g
end
else
if hexify then
handle(format("%s [%q]=0x%04X,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
end
end
elseif t == "string" then
- if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then
+ if reduce and tonumber(v) then
--~ handle(format("%s %s=%s,",depth,key(k),v))
if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
@@ -1001,7 +1181,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify)
end
end
-local function flatten(t,f,complete)
+local function flatten(t,f,complete) -- is this used? maybe a variant with next, ...
for i=1,#t do
local v = t[i]
if type(v) == "table" then
@@ -1030,6 +1210,24 @@ end
table.flatten_one_level = table.unnest
+-- a better one:
+
+local function flattened(t,f)
+ if not f then
+ f = { }
+ end
+ for k, v in next, t do
+ if type(v) == "table" then
+ flattened(v,f)
+ else
+ f[k] = v
+ end
+ end
+ return f
+end
+
+table.flattened = flattened
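-- editor's sketch, not part of this changeset: flattened merges nested tables into
-- one level, keeping the leaf keys (later ones overwrite earlier ones).
--~ local f = table.flattened { a = 1, sub = { b = 2, c = 3 } }
--~ print(f.a, f.b, f.c) -- 1  2  3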
+
-- the next three may disappear
function table.remove_value(t,value) -- todo: n
@@ -1165,7 +1363,7 @@ function table.clone(t,p) -- t is optional or nil or table
elseif not t then
t = { }
end
- setmetatable(t, { __index = function(_,key) return p[key] end })
+ setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ?
return t
end
@@ -1193,21 +1391,36 @@ function table.reverse(t)
return tt
end
---~ function table.keys(t)
---~ local k = { }
---~ for k,_ in next, t do
---~ k[#k+1] = k
---~ end
---~ return k
---~ end
+function table.insert_before_value(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i,extra)
+ return
+ end
+ end
+ insert(t,1,extra)
+end
+
+function table.insert_after_value(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
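-- editor's sketch, not part of this changeset: extra is removed first and then put
-- back right before (or after) the first occurrence of value.
--~ local t = { "a", "b", "c" }
--~ table.insert_after_value(t,"a","x")
--~ print(table.concat(t," ")) -- a x b c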
---~ function table.keys_as_string(t)
---~ local k = { }
---~ for k,_ in next, t do
---~ k[#k+1] = k
---~ end
---~ return concat(k,"")
---~ end
end -- of closure
@@ -1216,13 +1429,13 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-io'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local byte = string.byte
+local byte, find, gsub = string.byte, string.find, string.gsub
if string.find(os.getenv("PATH"),";") then
io.fileseparator, io.pathseparator = "\\", ";"
@@ -1251,7 +1464,7 @@ function io.savedata(filename,data,joiner)
elseif type(data) == "function" then
data(f)
else
- f:write(data)
+ f:write(data or "")
end
f:close()
return true
@@ -1380,20 +1593,21 @@ function io.ask(question,default,options)
end
io.write(string.format(" "))
local answer = io.read()
- answer = answer:gsub("^%s*(.*)%s*$","%1")
+ answer = gsub(answer,"^%s*(.*)%s*$","%1")
if answer == "" and default then
return default
elseif not options then
return answer
else
- for _,v in pairs(options) do
- if v == answer then
+ for k=1,#options do
+ if options[k] == answer then
return answer
end
end
local pattern = "^" .. answer
- for _,v in pairs(options) do
- if v:find(pattern) then
+ for k=1,#options do
+ local v = options[k]
+ if find(v,pattern) then
return v
end
end
@@ -1408,20 +1622,22 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-number'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local format = string.format
+local tostring = tostring
+local format, floor, insert, match = string.format, math.floor, table.insert, string.match
+local lpegmatch = lpeg.match
number = number or { }
-- a,b,c,d,e,f = number.toset(100101)
function number.toset(n)
- return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
+ return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
end
function number.toevenhex(n)
@@ -1447,10 +1663,21 @@ end
local one = lpeg.C(1-lpeg.S(''))^1
function number.toset(n)
- return one:match(tostring(n))
+ return lpegmatch(one,tostring(n))
end
-
+function number.bits(n,zero)
+ local t, i = { }, (zero and 0) or 1
+ while n > 0 do
+ local m = n % 2
+ if m > 0 then
+ insert(t,1,i)
+ end
+ n = floor(n/2)
+ i = i + 1
+ end
+ return t
+end
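-- editor's sketch, not part of this changeset: bits returns the positions of the set
-- bits, most significant first, one based unless the second argument is true.
--~ print(table.concat(number.bits(5)," "))      -- 3 1
--~ print(table.concat(number.bits(5,true)," ")) -- 2 0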
end -- of closure
@@ -1459,7 +1686,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-set'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -1549,46 +1776,63 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-os'] = {
version = 1.001,
- comment = "companion to luat-lub.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local find = string.find
+-- maybe build io.flush in os.execute
+
+local find, format, gsub = string.find, string.format, string.gsub
+local random, ceil = math.random, math.ceil
+
+local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush
+
+function os.execute(...) ioflush() return execute(...) end
+function os.spawn (...) ioflush() return spawn (...) end
+function os.exec (...) ioflush() return exec (...) end
function os.resultof(command)
- return io.popen(command,"r"):read("*all")
+ ioflush() -- else messed up logging
+ local handle = io.popen(command,"r")
+ if not handle then
+ -- print("unknown command '".. command .. "' in os.resultof")
+ return ""
+ else
+ return handle:read("*all") or ""
+ end
end
-if not os.exec then os.exec = os.execute end
-if not os.spawn then os.spawn = os.execute end
-
---~ os.type : windows | unix (new, we already guessed os.platform)
---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
+--~ os.type : windows | unix (new, we already guessed os.platform)
+--~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
+--~ os.platform : extended os.name with architecture
if not io.fileseparator then
if find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator, os.platform = "\\", ";", os.type or "windows"
+ io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
else
- io.fileseparator, io.pathseparator, os.platform = "/" , ":", os.type or "unix"
+ io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
end
end
-os.platform = os.platform or os.type or (io.pathseparator == ";" and "windows") or "unix"
+os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
+os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
+
+if os.type == "windows" then
+ os.libsuffix, os.binsuffix = 'dll', 'exe'
+else
+ os.libsuffix, os.binsuffix = 'so', ''
+end
function os.launch(str)
- if os.platform == "windows" then
+ if os.type == "windows" then
os.execute("start " .. str) -- os.spawn ?
else
os.execute(str .. " &") -- os.spawn ?
end
end
-if not os.setenv then
- function os.setenv() return false end
-end
-
if not os.times then
-- utime = user time
-- stime = system time
@@ -1618,64 +1862,218 @@ end
--~ print(os.date("%H:%M:%S",os.gettimeofday()))
--~ print(os.date("%H:%M:%S",os.time()))
-os.arch = os.arch or function()
- local a = os.resultof("uname -m") or "linux"
- os.arch = function()
- return a
+-- no need for function anymore as we have more clever code and helpers now
+-- this metatable trickery might as well disappear
+
+os.resolvers = os.resolvers or { }
+
+local resolvers = os.resolvers
+
+local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil
+local osix = osmt.__index
+
+osmt.__index = function(t,k)
+ return (resolvers[k] or osix)(t,k)
+end
+
+setmetatable(os,osmt)
+
+if not os.setenv then
+
+ -- we still store them but they won't be seen in
+ -- child processes although we might pass them some day
+ -- using command concatenation
+
+ local env, getenv = { }, os.getenv
+
+ function os.setenv(k,v)
+ env[k] = v
+ end
+
+ function os.getenv(k)
+ return env[k] or getenv(k)
end
- return a
+
end
-local platform
+-- we can use HOSTTYPE on some platforms
-function os.currentplatform(name,default)
- if not platform then
- local name = os.name or os.platform or name -- os.name is built in, os.platform is mine
- if not name then
- platform = default or "linux"
- elseif name == "windows" or name == "mswin" or name == "win32" or name == "msdos" then
- if os.getenv("PROCESSOR_ARCHITECTURE") == "AMD64" then
- platform = "mswin-64"
- else
- platform = "mswin"
- end
+local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
+
+local function guess()
+ local architecture = os.resultof("uname -m") or ""
+ if architecture ~= "" then
+ return architecture
+ end
+ architecture = os.getenv("HOSTTYPE") or ""
+ if architecture ~= "" then
+ return architecture
+ end
+ return os.resultof("echo $HOSTTYPE") or ""
+end
+
+if platform ~= "" then
+
+ os.platform = platform
+
+elseif os.type == "windows" then
+
+ -- we could set the variable directly, no function needed here
+
+ function os.resolvers.platform(t,k)
+ local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
+ if find(architecture,"AMD64") then
+ platform = "mswin-64"
else
- local architecture = os.arch()
- if name == "linux" then
- if find(architecture,"x86_64") then
- platform = "linux-64"
- elseif find(architecture,"ppc") then
- platform = "linux-ppc"
- else
- platform = "linux"
- end
- elseif name == "macosx" then
- if find(architecture,"i386") then
- platform = "osx-intel"
- else
- platform = "osx-ppc"
- end
- elseif name == "sunos" then
- if find(architecture,"sparc") then
- platform = "solaris-sparc"
- else -- if architecture == 'i86pc'
- platform = "solaris-intel"
- end
- elseif name == "freebsd" then
- if find(architecture,"amd64") then
- platform = "freebsd-amd64"
- else
- platform = "freebsd"
- end
- else
- platform = default or name
- end
+ platform = "mswin"
end
- function os.currentplatform()
- return platform
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "linux" then
+
+ function os.resolvers.platform(t,k)
+ -- we sometimes have HOSTTYPE set so let's check that first
+ local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform = "linux-64"
+ elseif find(architecture,"ppc") then
+ platform = "linux-ppc"
+ else
+ platform = "linux"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "macosx" then
+
+ --[[
+ Identifying the architecture of OSX is quite a mess and this
+ is the best we can come up with. For some reason $HOSTTYPE is
+ a kind of pseudo environment variable, not known to the current
+ environment. And yes, uname cannot be trusted either, so there
+ is a chance that you end up with a 32 bit run on a 64 bit system.
+ Also, some proper 64 bit intel macs are too cheap (low-end) and
+ therefore not permitted to run the 64 bit kernel.
+ ]]--
+
+ function os.resolvers.platform(t,k)
+ -- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
+ -- if architecture == "" then
+ -- architecture = os.resultof("echo $HOSTTYPE") or ""
+ -- end
+ local platform, architecture = "", os.resultof("echo $HOSTTYPE") or ""
+ if architecture == "" then
+ -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
+ platform = "osx-intel"
+ elseif find(architecture,"i386") then
+ platform = "osx-intel"
+ elseif find(architecture,"x86_64") then
+ platform = "osx-64"
+ else
+ platform = "osx-ppc"
end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "sunos" then
+
+ function os.resolvers.platform(t,k)
+ local platform, architecture = "", os.resultof("uname -m") or ""
+ if find(architecture,"sparc") then
+ platform = "solaris-sparc"
+ else -- if architecture == 'i86pc'
+ platform = "solaris-intel"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "freebsd" then
+
+ function os.resolvers.platform(t,k)
+ local platform, architecture = "", os.resultof("uname -m") or ""
+ if find(architecture,"amd64") then
+ platform = "freebsd-amd64"
+ else
+ platform = "freebsd"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "kfreebsd" then
+
+ function os.resolvers.platform(t,k)
+ -- we sometimes have HOSTTYPE set so let's check that first
+ local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform = "kfreebsd-64"
+ else
+ platform = "kfreebsd-i386"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+else
+
+ -- platform = "linux"
+ -- os.setenv("MTX_PLATFORM",platform)
+ -- os.platform = platform
+
+ function os.resolvers.platform(t,k)
+ local platform = "linux"
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+end
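-- editor's sketch, not part of this changeset: os.platform is resolved lazily via the
-- os metatable the first time it is accessed; MTX_PLATFORM is set as a side effect.
--~ print(os.platform)               -- e.g. linux-64 or mswin
--~ print(os.getenv("MTX_PLATFORM")) -- the same value afterwards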
+
+-- beware, we set the randomseed
+
+-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
+-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom
+-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal
+-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479.
+--
+-- as we don't call this function too often there is not so much risk of repetition
+
+local t = { 8, 9, "a", "b" }
+
+function os.uuid()
+ return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
+ random(0xFFFF),random(0xFFFF),
+ random(0x0FFF),
+ t[ceil(random(4))] or 8,random(0x0FFF),
+ random(0xFFFF),
+ random(0xFFFF),random(0xFFFF),random(0xFFFF)
+ )
+end
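-- editor's sketch, not part of this changeset: a version 4 (random) uuid, fine for
-- identifiers but not cryptographically strong.
--~ print(os.uuid()) -- something like 3f2a9c01-4b7d-4e2a-a1c4-0f3b2d9e8c7a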
+
+local d
+
+function os.timezone(delta)
+ d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
+ if delta then
+ if d > 0 then
+ return format("+%02i:00",d)
+ else
+ return format("-%02i:00",-d)
+ end
+ else
+ return 1
end
- return platform
end
@@ -1685,7 +2083,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-file'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -1696,14 +2094,17 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
local concat = table.concat
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
+local lpegmatch = lpeg.match
function file.removesuffix(filename)
return (gsub(filename,"%.[%a%d]+$",""))
end
function file.addsuffix(filename, suffix)
- if not find(filename,"%.[%a%d]+$") then
+ if not suffix or suffix == "" then
+ return filename
+ elseif not find(filename,"%.[%a%d]+$") then
return filename .. "." .. suffix
else
return filename
@@ -1726,20 +2127,39 @@ function file.nameonly(name)
return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.extname(name)
- return match(name,"^.+%.([^/\\]-)$") or ""
+function file.extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
file.suffix = file.extname
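-- editor's sketch, not part of this changeset: the optional second argument is the
-- fallback suffix.
--~ print(file.extname("test.tex"))       -- tex
--~ print(file.extname("test","unknown")) -- unknown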
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
+--~ function file.join(...)
+--~ local pth = concat({...},"/")
+--~ pth = gsub(pth,"\\","/")
+--~ local a, b = match(pth,"^(.*://)(.*)$")
+--~ if a and b then
+--~ return a .. gsub(b,"//+","/")
+--~ end
+--~ a, b = match(pth,"^(//)(.*)$")
+--~ if a and b then
+--~ return a .. gsub(b,"//+","/")
+--~ end
+--~ return (gsub(pth,"//+","/"))
+--~ end
+
+local trick_1 = char(1)
+local trick_2 = "^" .. trick_1 .. "/+"
function file.join(...)
- local pth = concat({...},"/")
+ local lst = { ... }
+ local a, b = lst[1], lst[2]
+ if a == "" then
+ lst[1] = trick_1
+ elseif b and find(a,"^/+$") and find(b,"^/") then
+ lst[1] = ""
+ lst[2] = gsub(b,"^/+","")
+ end
+ local pth = concat(lst,"/")
pth = gsub(pth,"\\","/")
local a, b = match(pth,"^(.*://)(.*)$")
if a and b then
@@ -1749,17 +2169,28 @@ function file.join(...)
if a and b then
return a .. gsub(b,"//+","/")
end
+ pth = gsub(pth,trick_2,"")
return (gsub(pth,"//+","/"))
end
+--~ print(file.join("//","/y"))
+--~ print(file.join("/","/y"))
+--~ print(file.join("","/y"))
+--~ print(file.join("/x/","/y"))
+--~ print(file.join("x/","/y"))
+--~ print(file.join("http://","/y"))
+--~ print(file.join("http://a","/y"))
+--~ print(file.join("http:///a","/y"))
+--~ print(file.join("//nas-1","/y"))
+
function file.iswritable(name)
local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
- return a and a.permissions:sub(2,2) == "w"
+ return a and sub(a.permissions,2,2) == "w"
end
function file.isreadable(name)
local a = lfs.attributes(name)
- return a and a.permissions:sub(1,1) == "r"
+ return a and sub(a.permissions,1,1) == "r"
end
file.is_readable = file.isreadable
@@ -1767,36 +2198,50 @@ file.is_writable = file.iswritable
-- todo: lpeg
-function file.split_path(str)
- local t = { }
- str = gsub(str,"\\", "/")
- str = gsub(str,"(%a):([;/])", "%1\001%2")
- for name in gmatch(str,"([^;:]+)") do
- if name ~= "" then
- t[#t+1] = gsub(name,"\001",":")
- end
- end
- return t
+--~ function file.split_path(str)
+--~ local t = { }
+--~ str = gsub(str,"\\", "/")
+--~ str = gsub(str,"(%a):([;/])", "%1\001%2")
+--~ for name in gmatch(str,"([^;:]+)") do
+--~ if name ~= "" then
+--~ t[#t+1] = gsub(name,"\001",":")
+--~ end
+--~ end
+--~ return t
+--~ end
+
+local checkedsplit = string.checkedsplit
+
+function file.split_path(str,separator)
+ str = gsub(str,"\\","/")
+ return checkedsplit(str,separator or io.pathseparator)
end
function file.join_path(tab)
return concat(tab,io.pathseparator) -- can have trailing //
end
+-- we can hash them weakly
+
function file.collapse_path(str)
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
- end
- str = gsub(str,"([^/])/$","%1")
- str = gsub(str,"^%./","")
- str = gsub(str,"/%.$","")
+ str = gsub(str,"\\","/")
+ if find(str,"/") then
+ str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
+ str = gsub(str,"/%./","/")
+ local n, m = 1, 1
+ while n > 0 or m > 0 do
+ str, n = gsub(str,"[^/%.]+/%.%.$","")
+ str, m = gsub(str,"[^/%.]+/%.%./","")
+ end
+ str = gsub(str,"([^/])/$","%1")
+ -- str = gsub(str,"^%./","") -- ./xx in qualified
+ str = gsub(str,"/%.$","")
+ end
if str == "" then str = "." end
return str
end
+--~ print(file.collapse_path("/a"))
--~ print(file.collapse_path("a/./b/.."))
--~ print(file.collapse_path("a/aa/../b/bb"))
--~ print(file.collapse_path("a/../.."))
@@ -1826,27 +2271,27 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1
--~ function file.extname(name)
---~ return pattern:match(name) or ""
+--~ return lpegmatch(pattern,name) or ""
--~ end
--~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1)
--~ function file.removesuffix(name)
---~ return pattern:match(name)
+--~ return lpegmatch(pattern,name)
--~ end
--~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1
--~ function file.basename(name)
---~ return pattern:match(name) or name
+--~ return lpegmatch(pattern,name) or name
--~ end
--~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1
--~ function file.dirname(name)
---~ local p = pattern:match(name)
+--~ local p = lpegmatch(pattern,name)
--~ if p then
---~ return name:sub(1,p-2)
+--~ return sub(name,1,p-2)
--~ else
--~ return ""
--~ end
@@ -1855,7 +2300,7 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
--~ function file.addsuffix(name, suffix)
---~ local p = pattern:match(name)
+--~ local p = lpegmatch(pattern,name)
--~ if p then
--~ return name
--~ else
@@ -1866,9 +2311,9 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
--~ function file.replacesuffix(name,suffix)
---~ local p = pattern:match(name)
+--~ local p = lpegmatch(pattern,name)
--~ if p then
---~ return name:sub(1,p-2) .. "." .. suffix
+--~ return sub(name,1,p-2) .. "." .. suffix
--~ else
--~ return name .. "." .. suffix
--~ end
@@ -1877,11 +2322,11 @@ end
--~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1
--~ function file.nameonly(name)
---~ local a, b = pattern:match(name)
+--~ local a, b = lpegmatch(pattern,name)
--~ if b then
---~ return name:sub(a,b-2)
+--~ return sub(name,a,b-2)
--~ elseif a then
---~ return name:sub(a)
+--~ return sub(name,a)
--~ else
--~ return name
--~ end
@@ -1915,11 +2360,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":")
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)
- return qualified:match(filename)
+ return lpegmatch(qualified,filename) ~= nil
end
function file.is_rootbased_path(filename)
- return rootbased:match(filename)
+ return lpegmatch(rootbased,filename) ~= nil
end
local slash = lpeg.S("\\/")
@@ -1932,16 +2377,25 @@ local base = lpeg.C((1-suffix)^0)
local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc(""))
function file.splitname(str) -- returns drive, path, base, suffix
- return pattern:match(str)
+ return lpegmatch(pattern,str)
end
--- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end
+-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
--
-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
-- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
+--~ -- todo:
+--~
+--~ if os.type == "windows" then
+--~ local currentdir = lfs.currentdir
+--~ function lfs.currentdir()
+--~ return (gsub(currentdir(),"\\","/"))
+--~ end
+--~ end
+
end -- of closure
@@ -2006,7 +2460,7 @@ end
function file.loadchecksum(name)
if md5 then
local data = io.loaddata(name .. ".md5")
- return data and data:gsub("%s","")
+ return data and (gsub(data,"%s",""))
end
return nil
end
@@ -2025,19 +2479,168 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['l-url'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local char, gmatch, gsub = string.char, string.gmatch, string.gsub
+local tonumber, type = tonumber, type
+local lpegmatch = lpeg.match
+
+-- from the spec (on the web):
+--
+-- foo://example.com:8042/over/there?name=ferret#nose
+-- \_/ \______________/\_________/ \_________/ \__/
+-- | | | | |
+-- scheme authority path query fragment
+-- | _____________________|__
+-- / \ / \
+-- urn:example:animal:ferret:nose
+
+url = url or { }
+
+local function tochar(s)
+ return char(tonumber(s,16))
+end
+
+local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+
+local hexdigit = lpeg.R("09","AF","af")
+local plus = lpeg.P("+")
+local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar)
+
+-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+
+local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("")
+local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
+local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
+local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
+local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+
+local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+
+-- todo: reconsider Ct as we can as well have five return values (saves a table)
+-- so we can have two parsers, one with and one without
+
+function url.split(str)
+ return (type(str) == "string" and lpegmatch(parser,str)) or str
+end
+
+-- todo: cache them
+
+function url.hashed(str)
+ local s = url.split(str)
+ local somescheme = s[1] ~= ""
+ return {
+ scheme = (somescheme and s[1]) or "file",
+ authority = s[2],
+ path = s[3],
+ query = s[4],
+ fragment = s[5],
+ original = str,
+ noscheme = not somescheme,
+ }
+end
+
+function url.hasscheme(str)
+ return url.split(str)[1] ~= ""
+end
+
+function url.addscheme(str,scheme)
+ return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
+end
+
+function url.construct(hash)
+ local fullurl = hash.scheme .. "://".. hash.authority .. hash.path
+ if hash.query then
+ fullurl = fullurl .. "?".. hash.query
+ end
+ if hash.fragment then
+ fullurl = fullurl .. "?".. hash.fragment
+ end
+ return fullurl
+end
+
+function url.filename(filename)
+ local t = url.hashed(filename)
+ return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/","%1:/"))) or filename
+end
+
+function url.query(str)
+ if type(str) == "string" then
+ local t = { }
+ for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
+ t[k] = v
+ end
+ return t
+ else
+ return str
+ end
+end
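-- editor's sketch, not part of this changeset: query turns a key=value&key=value
-- string into a hash.
--~ local q = url.query("name=ferret&color=white")
--~ print(q.name, q.color) -- ferret  white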
+
+--~ print(url.filename("file:///c:/oeps.txt"))
+--~ print(url.filename("c:/oeps.txt"))
+--~ print(url.filename("file:///oeps.txt"))
+--~ print(url.filename("file:///etc/test.txt"))
+--~ print(url.filename("/oeps.txt"))
+
+--~ from the spec on the web (sort of):
+--~
+--~ function test(str)
+--~ print(table.serialize(url.hashed(str)))
+--~ end
+--~
+--~ test("%56pass%20words")
+--~ test("file:///c:/oeps.txt")
+--~ test("file:///c|/oeps.txt")
+--~ test("file:///etc/oeps.txt")
+--~ test("file://./etc/oeps.txt")
+--~ test("file:////etc/oeps.txt")
+--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
+--~ test("http://www.ietf.org/rfc/rfc2396.txt")
+--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
+--~ test("mailto:John.Doe@example.com")
+--~ test("news:comp.infosystems.www.servers.unix")
+--~ test("tel:+1-816-555-1212")
+--~ test("telnet://192.0.2.16:80/")
+--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
+--~ test("/etc/passwords")
+--~ test("http://www.pragma-ade.com/spaced%20name")
+
+--~ test("zip:///oeps/oeps.zip#bla/bla.tex")
+--~ test("zip:///oeps/oeps.zip?bla/bla.tex")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['l-dir'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
+-- dir.expand_name will be merged with cleanpath and collapsepath
+
local type = type
-local find, gmatch = string.find, string.gmatch
+local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local lpegmatch = lpeg.match
dir = dir or { }
+-- handy
+
+function dir.current()
+ return (gsub(lfs.currentdir(),"\\","/"))
+end
+
-- optimizing for no string.find (*) does not save time
local attributes = lfs.attributes
@@ -2068,6 +2671,35 @@ end
dir.glob_pattern = glob_pattern
+local function collect_pattern(path,patt,recurse,result)
+ local ok, scanner
+ result = result or { }
+ if path == "/" then
+ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ else
+ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ end
+ if ok and type(scanner) == "function" then
+ if not find(path,"/$") then path = path .. '/' end
+ for name in scanner do
+ local full = path .. name
+ local attr = attributes(full)
+ local mode = attr.mode
+ if mode == 'file' then
+ if find(full,patt) then
+ result[name] = attr
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ attr.list = collect_pattern(full,patt,recurse)
+ result[name] = attr
+ end
+ end
+ end
+ return result
+end
+
+dir.collect_pattern = collect_pattern
+
local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
local pattern = Ct {
@@ -2087,29 +2719,48 @@ local filter = Cs ( (
)^0 )
local function glob(str,t)
- if type(str) == "table" then
- local t = t or { }
- for s=1,#str do
- glob(str[s],t)
+ if type(t) == "function" then
+ if type(str) == "table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif lfs.isfile(str) then
+ t(str)
+ else
+ local split = lpegmatch(pattern,str)
+ if split then
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ glob_pattern(start,result,recurse,t)
+ end
end
- return t
- elseif lfs.isfile(str) then
- local t = t or { }
- t[#t+1] = str
- return t
else
- local split = pattern:match(str)
- if split then
+ if type(str) == "table" then
local t = t or { }
- local action = action or function(name) t[#t+1] = name end
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = filter:match(start .. base)
- glob_pattern(start,result,recurse,action)
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif lfs.isfile(str) then
+ local t = t or { }
+ t[#t+1] = str
return t
else
- return { }
+ local split = lpegmatch(pattern,str)
+ if split then
+ local t = t or { }
+ local action = action or function(name) t[#t+1] = name end
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ glob_pattern(start,result,recurse,action)
+ return t
+ else
+ return { }
+ end
end
end
end
@@ -2171,11 +2822,12 @@ end
local make_indeed = true -- false
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";") then -- os.type == "windows"
function dir.mkdirs(...)
- local str, pth = "", ""
- for _, s in ipairs({...}) do
+ local str, pth, t = "", "", { ... }
+ for i=1,#t do
+ local s = t[i]
if s ~= "" then
if str ~= "" then
str = str .. "/" .. s
@@ -2186,13 +2838,13 @@ if string.find(os.getenv("PATH"),";") then
end
local first, middle, last
local drive = false
- first, middle, last = str:match("^(//)(//*)(.*)$")
+ first, middle, last = match(str,"^(//)(//*)(.*)$")
if first then
-- empty network path == local path
else
- first, last = str:match("^(//)/*(.-)$")
+ first, last = match(str,"^(//)/*(.-)$")
if first then
- middle, last = str:match("([^/]+)/+(.-)$")
+ middle, last = match(str,"([^/]+)/+(.-)$")
if middle then
pth = "//" .. middle
else
@@ -2200,11 +2852,11 @@ if string.find(os.getenv("PATH"),";") then
last = ""
end
else
- first, middle, last = str:match("^([a-zA-Z]:)(/*)(.-)$")
+ first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
if first then
pth, drive = first .. middle, true
else
- middle, last = str:match("^(/*)(.-)$")
+ middle, last = match(str,"^(/*)(.-)$")
if not middle then
last = str
end
@@ -2238,34 +2890,31 @@ if string.find(os.getenv("PATH"),";") then
--~ print(dir.mkdirs("///a/b/c"))
--~ print(dir.mkdirs("a/bbb//ccc/"))
- function dir.expand_name(str)
- local first, nothing, last = str:match("^(//)(//*)(.*)$")
+ function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
+ local first, nothing, last = match(str,"^(//)(//*)(.*)$")
if first then
- first = lfs.currentdir() .. "/"
- first = first:gsub("\\","/")
+ first = dir.current() .. "/"
end
if not first then
- first, last = str:match("^(//)/*(.*)$")
+ first, last = match(str,"^(//)/*(.*)$")
end
if not first then
- first, last = str:match("^([a-zA-Z]:)(.*)$")
+ first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = lfs.currentdir()
if lfs.chdir(first) then
- first = lfs.currentdir()
- first = first:gsub("\\","/")
+ first = dir.current()
end
lfs.chdir(d)
end
end
if not first then
- first, last = lfs.currentdir(), str
- first = first:gsub("\\","/")
+ first, last = dir.current(), str
end
- last = last:gsub("//","/")
- last = last:gsub("/%./","/")
- last = last:gsub("^/*","")
- first = first:gsub("/*$","")
+ last = gsub(last,"//","/")
+ last = gsub(last,"/%./","/")
+ last = gsub(last,"^/*","")
+ first = gsub(first,"/*$","")
if last == "" then
return first
else
@@ -2276,8 +2925,9 @@ if string.find(os.getenv("PATH"),";") then
else
function dir.mkdirs(...)
- local str, pth = "", ""
- for _, s in ipairs({...}) do
+ local str, pth, t = "", "", { ... }
+ for i=1,#t do
+ local s = t[i]
if s ~= "" then
if str ~= "" then
str = str .. "/" .. s
@@ -2286,7 +2936,7 @@ else
end
end
end
- str = str:gsub("/+","/")
+ str = gsub(str,"/+","/")
if find(str,"^/") then
pth = "/"
for s in gmatch(str,"[^/]+") do
@@ -2320,12 +2970,12 @@ else
--~ print(dir.mkdirs("///a/b/c"))
--~ print(dir.mkdirs("a/bbb//ccc/"))
- function dir.expand_name(str)
+ function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
if not find(str,"^/") then
str = lfs.currentdir() .. "/" .. str
end
- str = str:gsub("//","/")
- str = str:gsub("/%./","/")
+ str = gsub(str,"//","/")
+ str = gsub(str,"/%./","/")
return str
end
@@ -2340,7 +2990,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-boolean'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -2401,7 +3051,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-math'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -2448,7 +3098,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-utils'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -2456,6 +3106,10 @@ if not modules then modules = { } end modules ['l-utils'] = {
-- hm, quite unreadable
+local gsub = string.gsub
+local concat = table.concat
+local type, next = type, next
+
if not utils then utils = { } end
if not utils.merger then utils.merger = { } end
if not utils.lua then utils.lua = { } end
@@ -2493,7 +3147,7 @@ function utils.merger._self_load_(name)
end
if data and utils.merger.strip_comment then
-- saves some 20K
- data = data:gsub("%-%-~[^\n\r]*[\r\n]", "")
+ data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "")
end
return data or ""
end
@@ -2511,7 +3165,7 @@ end
function utils.merger._self_swap_(data,code)
if data ~= "" then
- return (data:gsub(utils.merger.pattern, function(s)
+ return (gsub(data,utils.merger.pattern, function(s)
return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n"
end, 1))
else
@@ -2521,8 +3175,8 @@ end
--~ stripper:
--~
---~ data = string.gsub(data,"%-%-~[^\n]*\n","")
---~ data = string.gsub(data,"\n\n+","\n")
+--~ data = gsub(data,"%-%-~[^\n]*\n","")
+--~ data = gsub(data,"\n\n+","\n")
function utils.merger._self_libs_(libs,list)
local result, f, frozen = { }, nil, false
@@ -2530,9 +3184,10 @@ function utils.merger._self_libs_(libs,list)
if type(libs) == 'string' then libs = { libs } end
if type(list) == 'string' then list = { list } end
local foundpath = nil
- for _, lib in ipairs(libs) do
- for _, pth in ipairs(list) do
- pth = string.gsub(pth,"\\","/") -- file.clean_path
+ for i=1,#libs do
+ local lib = libs[i]
+ for j=1,#list do
+ local pth = gsub(list[j],"\\","/") -- file.clean_path
utils.report("checking library path %s",pth)
local name = pth .. "/" .. lib
if lfs.isfile(name) then
@@ -2544,7 +3199,8 @@ function utils.merger._self_libs_(libs,list)
if foundpath then
utils.report("using library path %s",foundpath)
local right, wrong = { }, { }
- for _, lib in ipairs(libs) do
+ for i=1,#libs do
+ local lib = libs[i]
local fullname = foundpath .. "/" .. lib
if lfs.isfile(fullname) then
-- right[#right+1] = lib
@@ -2558,15 +3214,15 @@ function utils.merger._self_libs_(libs,list)
end
end
if #right > 0 then
- utils.report("merged libraries: %s",table.concat(right," "))
+ utils.report("merged libraries: %s",concat(right," "))
end
if #wrong > 0 then
- utils.report("skipped libraries: %s",table.concat(wrong," "))
+ utils.report("skipped libraries: %s",concat(wrong," "))
end
else
utils.report("no valid library path found")
end
- return table.concat(result, "\n\n")
+ return concat(result, "\n\n")
end
function utils.merger.selfcreate(libs,list,target)
@@ -2624,16 +3280,28 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['l-aux'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
+-- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. end
+
aux = aux or { }
local concat, format, gmatch = table.concat, string.format, string.gmatch
local tostring, type = tostring, type
+local lpegmatch = lpeg.match
+
+local P, R, V = lpeg.P, lpeg.R, lpeg.V
+
+local escape, left, right = P("\\"), P('{'), P('}')
+
+lpeg.patterns.balanced = P {
+ [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
+ [2] = left * V(1) * right
+}
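-- a minimal usage sketch of the pattern above: there are no captures, so
-- lpegmatch returns the position just past the matched prefix; reaching #str+1
-- means the braces in str are balanced (escaped braces count as plain text)
--
--~ print(lpegmatch(lpeg.patterns.balanced,"a{b{c}}d")) -- 9, i.e. #str+1, balanced
--~ print(lpegmatch(lpeg.patterns.balanced,"a{b"))      -- 2, stops before the unmatched brace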
local space = lpeg.P(' ')
local equal = lpeg.P("=")
@@ -2641,7 +3309,7 @@ local comma = lpeg.P(",")
local lbrace = lpeg.P("{")
local rbrace = lpeg.P("}")
local nobrace = 1 - (lbrace+rbrace)
-local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace }
+local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace }
local spaces = space^0
local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
@@ -2679,13 +3347,13 @@ function aux.make_settings_to_hash_pattern(set,how)
end
end
-function aux.settings_to_hash(str)
+function aux.settings_to_hash(str,existing)
if str and str ~= "" then
- hash = { }
+ hash = existing or { }
if moretolerant then
- pattern_b_s:match(str)
+ lpegmatch(pattern_b_s,str)
else
- pattern_a_s:match(str)
+ lpegmatch(pattern_a_s,str)
end
return hash
else
@@ -2693,39 +3361,41 @@ function aux.settings_to_hash(str)
end
end
-function aux.settings_to_hash_tolerant(str)
+function aux.settings_to_hash_tolerant(str,existing)
if str and str ~= "" then
- hash = { }
- pattern_b_s:match(str)
+ hash = existing or { }
+ lpegmatch(pattern_b_s,str)
return hash
else
return { }
end
end
-function aux.settings_to_hash_strict(str)
+function aux.settings_to_hash_strict(str,existing)
if str and str ~= "" then
- hash = { }
- pattern_c_s:match(str)
+ hash = existing or { }
+ lpegmatch(pattern_c_s,str)
return next(hash) and hash
else
return nil
end
end
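-- a hedged usage sketch (pattern_a_s/_b_s/_c_s are built outside this hunk):
-- the functions above fill a hash from a comma separated key=value list,
-- optionally into an existing table
--
--~ local h = aux.settings_to_hash("width=10pt, height=2cm")
--~ -- presumably h.width == "10pt" and h.height == "2cm"
--~ aux.settings_to_hash("depth=1cm",h) -- the new second argument extends h in place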
-local seperator = comma * space^0
+local separator = comma * space^0
local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
-local pattern = lpeg.Ct(value*(seperator*value)^0)
+local pattern = lpeg.Ct(value*(separator*value)^0)
-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
aux.settings_to_array_pattern = pattern
+-- we could use a weak table as cache
+
function aux.settings_to_array(str)
if not str or str == "" then
return { }
else
- return pattern:match(str)
+ return lpegmatch(pattern,str)
end
end
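-- a small sketch of the behaviour documented above:
--
--~ local t = aux.settings_to_array("aap, {noot}, mies")
--~ -- t == { "aap", "noot", "mies" } : outer braces removed, leading spaces ignored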
@@ -2734,10 +3404,10 @@ local function set(t,v)
end
local value = lpeg.P(lpeg.Carg(1)*value) / set
-local pattern = value*(seperator*value)^0 * lpeg.Carg(1)
+local pattern = value*(separator*value)^0 * lpeg.Carg(1)
function aux.add_settings_to_array(t,str)
- return pattern:match(str, nil, t)
+ return lpegmatch(pattern,str,nil,t)
end
function aux.hash_to_string(h,separator,yes,no,strict,omit)
@@ -2785,6 +3455,13 @@ function aux.settings_to_set(str,t)
return t
end
+local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace
+local pattern = lpeg.Ct((space + value)^0)
+
+function aux.arguments_to_table(str)
+ return lpegmatch(pattern,str)
+end
+
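-- a hedged sketch of the new helper: it collects brace delimited, space
-- separated arguments into a table
--
--~ local t = aux.arguments_to_table("{one} {two three} {}")
--~ -- presumably t == { "one", "two three", "" }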
-- temporary here
function aux.getparameters(self,class,parentclass,settings)
@@ -2793,36 +3470,31 @@ function aux.getparameters(self,class,parentclass,settings)
sc = table.clone(self[parent])
self[class] = sc
end
- aux.add_settings_to_array(sc, settings)
+ aux.settings_to_hash(settings,sc)
end
-- temporary here
-local digit = lpeg.R("09")
-local period = lpeg.P(".")
-local zero = lpeg.P("0")
-
---~ local finish = lpeg.P(-1)
---~ local nodigit = (1-digit) + finish
---~ local case_1 = (period * zero^1 * #nodigit)/"" -- .000
---~ local case_2 = (period * (1-(zero^0/"") * #nodigit)^1 * (zero^0/"") * nodigit) -- .010 .10 .100100
-
+local digit = lpeg.R("09")
+local period = lpeg.P(".")
+local zero = lpeg.P("0")
local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-
-local number = digit^1 * (case_1 + case_2)
-local stripper = lpeg.Cs((number + 1)^0)
+local case_1 = period * trailingzeros / ""
+local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
+local number = digit^1 * (case_1 + case_2)
+local stripper = lpeg.Cs((number + 1)^0)
--~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
--~ collectgarbage("collect")
--~ str = string.rep(sample,10000)
--~ local ts = os.clock()
---~ stripper:match(str)
---~ print(#str, os.clock()-ts, stripper:match(sample))
+--~ lpegmatch(stripper,str)
+--~ print(#str, os.clock()-ts, lpegmatch(stripper,sample))
+
+lpeg.patterns.strip_zeros = stripper
function aux.strip_zeros(str)
- return stripper:match(str)
+ return lpegmatch(stripper,str)
end
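-- a usage sketch, using values from the commented benchmark sample above:
--
--~ print(aux.strip_zeros("11.00 0.1100 1.00100 0.00"))
--~ -- presumably prints: 11 0.11 1.001 0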
function aux.definetable(target) -- defines undefined tables
@@ -2846,6 +3518,375 @@ function aux.accesstable(target)
return t
end
+--~ function string.commaseparated(str)
+--~ return gmatch(str,"([^,%s]+)")
+--~ end
+
+-- as we use this a lot ...
+
+--~ function aux.cachefunction(action,weak)
+--~ local cache = { }
+--~ if weak then
+--~ setmetatable(cache, { __mode = "kv" } )
+--~ end
+--~ local function reminder(str)
+--~ local found = cache[str]
+--~ if not found then
+--~ found = action(str)
+--~ cache[str] = found
+--~ end
+--~ return found
+--~ end
+--~ return reminder, cache
+--~ end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-tra'] = {
+ version = 1.001,
+ comment = "companion to trac-tra.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- the <anonymous> tag is kind of generic and used for functions that are not
+-- bound to a variable, like node.new, node.copy etc (contrary to for instance
+-- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+
+local debug = require "debug"
+
+local getinfo = debug.getinfo
+local type, next = type, next
+local concat = table.concat
+local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub
+
+debugger = debugger or { }
+
+local counters = { }
+local names = { }
+
+-- one
+
+local function hook()
+ local f = getinfo(2,"f").func
+ local n = getinfo(2,"Sn")
+-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ if f then
+ local cf = counters[f]
+ if cf == nil then
+ counters[f] = 1
+ names[f] = n
+ else
+ counters[f] = cf + 1
+ end
+ end
+end
+local function getname(func)
+ local n = names[func]
+ if n then
+ if n.what == "C" then
+ return n.name or '<anonymous>'
+ else
+ -- source short_src linedefined what name namewhat nups func
+ local name = n.name or n.namewhat or n.what
+ if not name or name == "" then name = "?" end
+ return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ end
+ else
+ return "unknown"
+ end
+end
+function debugger.showstats(printer,threshold)
+ printer = printer or texio.write or print
+ threshold = threshold or 0
+ local total, grandtotal, functions = 0, 0, 0
+ printer("\n") -- ugly but ok
+ -- table.sort(counters)
+ for func, count in next, counters do
+ if count > threshold then
+ local name = getname(func)
+ if not find(name,"for generator") then
+ printer(format("%8i %s", count, name))
+ total = total + count
+ end
+ end
+ grandtotal = grandtotal + count
+ functions = functions + 1
+ end
+ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+end
+
+-- two
+
+--~ local function hook()
+--~ local n = getinfo(2)
+--~ if n.what=="C" and not n.name then
+--~ local f = tostring(debug.traceback())
+--~ local cf = counters[f]
+--~ if cf == nil then
+--~ counters[f] = 1
+--~ names[f] = n
+--~ else
+--~ counters[f] = cf + 1
+--~ end
+--~ end
+--~ end
+--~ function debugger.showstats(printer,threshold)
+--~ printer = printer or texio.write or print
+--~ threshold = threshold or 0
+--~ local total, grandtotal, functions = 0, 0, 0
+--~ printer("\n") -- ugly but ok
+--~ -- table.sort(counters)
+--~ for func, count in next, counters do
+--~ if count > threshold then
+--~ printer(format("%8i %s", count, func))
+--~ total = total + count
+--~ end
+--~ grandtotal = grandtotal + count
+--~ functions = functions + 1
+--~ end
+--~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+--~ end
+
+-- rest
+
+function debugger.savestats(filename,threshold)
+ local f = io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+
+function debugger.disable()
+ debug.sethook()
+--~ counters[debug.getinfo(2,"f").func] = nil
+end
+
+function debugger.tracing()
+ local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
+ if n > 0 then
+ function debugger.tracing() return true end ; return true
+ else
+ function debugger.tracing() return false end ; return false
+ end
+end
+
+--~ debugger.enable()
+
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+
+--~ debugger.disable()
+
+--~ print("")
+--~ debugger.showstats()
+--~ print("")
+--~ debugger.showstats(print,3)
+
+setters = setters or { }
+setters.data = setters.data or { }
+
+--~ local function set(t,what,value)
+--~ local data, done = t.data, t.done
+--~ if type(what) == "string" then
+--~ what = aux.settings_to_array(what) -- inefficient but ok
+--~ end
+--~ for i=1,#what do
+--~ local w = what[i]
+--~ for d, f in next, data do
+--~ if done[d] then
+--~ -- prevent recursion due to wildcards
+--~ elseif find(d,w) then
+--~ done[d] = true
+--~ for i=1,#f do
+--~ f[i](value)
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+
+local function set(t,what,value)
+ local data, done = t.data, t.done
+ if type(what) == "string" then
+ what = aux.settings_to_hash(what) -- inefficient but ok
+ end
+ for w, v in next, what do
+ if v == "" then
+ v = value
+ else
+ v = toboolean(v)
+ end
+ for d, f in next, data do
+ if done[d] then
+ -- prevent recursion due to wildcards
+ elseif find(d,w) then
+ done[d] = true
+ for i=1,#f do
+ f[i](v)
+ end
+ end
+ end
+ end
+end
+
+local function reset(t)
+ for d, f in next, t.data do
+ for i=1,#f do
+ f[i](false)
+ end
+ end
+end
+
+local function enable(t,what)
+ set(t,what,true)
+end
+
+local function disable(t,what)
+ local data = t.data
+ if not what or what == "" then
+ t.done = { }
+ reset(t)
+ else
+ set(t,what,false)
+ end
+end
+
+function setters.register(t,what,...)
+ local data = t.data
+ what = lower(what)
+ local w = data[what]
+ if not w then
+ w = { }
+ data[what] = w
+ end
+ for _, fnc in next, { ... } do
+ local typ = type(fnc)
+ if typ == "function" then
+ w[#w+1] = fnc
+ elseif typ == "string" then
+ w[#w+1] = function(value) set(t,fnc,value,nesting) end
+ end
+ end
+end
+
+function setters.enable(t,what)
+ local e = t.enable
+ t.enable, t.done = enable, { }
+ enable(t,string.simpleesc(tostring(what)))
+ t.enable, t.done = e, { }
+end
+
+function setters.disable(t,what)
+ local e = t.disable
+ t.disable, t.done = disable, { }
+ disable(t,string.simpleesc(tostring(what)))
+ t.disable, t.done = e, { }
+end
+
+function setters.reset(t)
+ t.done = { }
+ reset(t)
+end
+
+function setters.list(t) -- pattern
+ local list = table.sortedkeys(t.data)
+ local user, system = { }, { }
+ for l=1,#list do
+ local what = list[l]
+ if find(what,"^%*") then
+ system[#system+1] = what
+ else
+ user[#user+1] = what
+ end
+ end
+ return user, system
+end
+
+function setters.show(t)
+ commands.writestatus("","")
+ local list = setters.list(t)
+ for k=1,#list do
+ commands.writestatus(t.name,list[k])
+ end
+ commands.writestatus("","")
+end
+
+-- we could have used a bit of oo and the trackers:enable syntax but
+-- there is already a lot of code around using the singular tracker
+
+-- we could make this into a module
+
+function setters.new(name)
+ local t
+ t = {
+ data = { },
+ name = name,
+ enable = function(...) setters.enable (t,...) end,
+ disable = function(...) setters.disable (t,...) end,
+ register = function(...) setters.register(t,...) end,
+ list = function(...) setters.list (t,...) end,
+ show = function(...) setters.show (t,...) end,
+ }
+ setters.data[name] = t
+ return t
+end
+
+trackers = setters.new("trackers")
+directives = setters.new("directives")
+experiments = setters.new("experiments")
+
+-- nice trick: we overload two of the directives related functions with variants that
+-- do tracing (itself using a tracker) .. proof of concept
+
+local trace_directives  = false  trackers.register("system.directives",  function(v) trace_directives  = v end)
+local trace_experiments = false  trackers.register("system.experiments", function(v) trace_experiments = v end)
+
+local e = directives.enable
+local d = directives.disable
+
+function directives.enable(...)
+ commands.writestatus("directives","enabling: %s",concat({...}," "))
+ e(...)
+end
+
+function directives.disable(...)
+ commands.writestatus("directives","disabling: %s",concat({...}," "))
+ d(...)
+end
+
+local e = experiments.enable
+local d = experiments.disable
+
+function experiments.enable(...)
+ commands.writestatus("experiments","enabling: %s",concat({...}," "))
+ e(...)
+end
+
+function experiments.disable(...)
+ commands.writestatus("experiments","disabling: %s",concat({...}," "))
+ d(...)
+end
+
+-- a useful example
+
+directives.register("system.nostatistics", function(v)
+ statistics.enable = not v
+end)
+
+
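-- a hedged usage sketch; "mymodule.whatever" is a made up key, the mechanism is
-- the register/enable pair shown above (keys are matched with find, so a key
-- can also act as a wildcard pattern)
--
--~ local trace_whatever = false
--~ trackers.register("mymodule.whatever", function(v) trace_whatever = v end)
--~ trackers.enable("mymodule.whatever")
--~ directives.enable("system.nostatistics") -- the example directive registered above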
end -- of closure
@@ -2859,6 +3900,12 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
license = "see context related readme files"
}
+-- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc
+-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
+-- trouble
+
+local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+
--[[ldx--
<p>The parser used here is inspired by the variant discussed in the lua book, but
handles comment and processing instructions, has a different structure, provides
@@ -2866,18 +3913,6 @@ parent access; a first version used different trickery but was less optimized to
went this route. First we had a find based parser, now we have an <l n='lpeg'/> based one.
The find based parser can be found in l-xml-edu.lua along with other older code.</p>
-<p>Expecially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for <l n='context'/> we also need
-this module for process management, like handling <l n='ctx'/> and <l n='rlx'/>
-files.</p>
-
-<typing>
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
-</typing>
-
<p>Beware, the interface may change. For instance at, ns, tg, dt may get more
verbose names. Once the code is stable we will also remove some tracing and
optimize the code.</p>
@@ -2888,26 +3923,11 @@ xml = xml or { }
--~ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, setmetatable = type, next, setmetatable
-local format, lower, find = string.format, string.lower, string.find
-
---[[ldx--
-<p>This module can be used stand alone but also inside <l n='mkiv'/> in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers.</p>
---ldx]]--
-
-local trace_remap = false
-
-if trackers then
- trackers.register("xml.remap", function(v) trace_remap = v end)
-end
-
-function xml.settrace(str,value)
- if str == "remap" then
- trace_remap = value or false
- end
-end
+local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
+local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
+local utfchar = unicode.utf8.char
+local lpegmatch = lpeg.match
+local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
--[[ldx--
<p>First a hack to enable namespace resolving. A namespace is characterized by
@@ -2919,7 +3939,7 @@ much cleaner.</p>
xml.xmlns = xml.xmlns or { }
-local check = lpeg.P(false)
+local check = P(false)
local parse = check
--[[ldx--
@@ -2932,8 +3952,8 @@ xml.registerns("mml","mathml")
--ldx]]--
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- check = check + lpeg.C(lpeg.P(lower(pattern))) / namespace
- parse = lpeg.P { lpeg.P(check) + 1 * lpeg.V(1) }
+ check = check + C(P(lower(pattern))) / namespace
+ parse = P { P(check) + 1 * V(1) }
end
--[[ldx--
@@ -2947,7 +3967,7 @@ xml.checkns("m","http://www.w3.org/mathml")
--ldx]]--
function xml.checkns(namespace,url)
- local ns = parse:match(lower(url))
+ local ns = lpegmatch(parse,lower(url))
if ns and namespace ~= ns then
xml.xmlns[namespace] = ns
end
@@ -2965,7 +3985,7 @@ This returns <t>mml</t>.
--ldx]]--
function xml.resolvens(url)
- return parse:match(lower(url)) or ""
+ return lpegmatch(parse,lower(url)) or ""
end
--[[ldx--
@@ -3004,27 +4024,36 @@ local x = xml.convert(somestring)
<p>An optional second boolean argument tells this function not to create a root
element.</p>
---ldx]]--
-xml.strip_cm_and_dt = false -- an extra global flag, in case we have many includes
+<p>Valid entities are:</p>
+
+<typing>
+<!ENTITY xxxx SYSTEM "yyyy" NDATA zzzz>
+<!ENTITY xxxx PUBLIC "yyyy" >
+<!ENTITY xxxx "yyyy" >
+</typing>
+--ldx]]--
-- not just one big nested table capture (lpeg overflow)
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = {}, {}, {}, {}, {}, nil, {}
+local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
+local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
+local dcache, hcache, acache = { }, { }, { }
-local mt = { __tostring = xml.text }
+local mt = { }
-function xml.check_error(top,toclose)
- return ""
+function initialize_mt(root)
+ mt = { __index = root } -- will be redefined later
end
-local strip = false
-local cleanup = false
+function xml.setproperty(root,k,v)
+ getmetatable(root).__index[k] = v
+end
-function xml.set_text_cleanup(fnc)
- cleanup = fnc
+function xml.check_error(top,toclose)
+ return ""
end
local function add_attribute(namespace,tag,value)
@@ -3034,12 +4063,31 @@ local function add_attribute(namespace,tag,value)
if tag == "xmlns" then
xmlns[#xmlns+1] = resolvens(value)
at[tag] = value
+ elseif namespace == "" then
+ at[tag] = value
elseif namespace == "xmlns" then
xml.checkns(tag,value)
at["xmlns:" .. tag] = value
else
- at[tag] = value
+ -- for the moment this way:
+ at[namespace .. ":" .. tag] = value
+ end
+end
+
+local function add_empty(spacing, namespace, tag)
+ if #spacing > 0 then
+ dt[#dt+1] = spacing
+ end
+ local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
+ top = stack[#stack]
+ dt = top.dt
+ local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
+ dt[#dt+1] = t
+ setmetatable(t, mt)
+ if at.xmlns then
+ remove(xmlns)
end
+ at = { }
end
local function add_begin(spacing, namespace, tag)
@@ -3067,28 +4115,12 @@ local function add_end(spacing, namespace, tag)
end
dt = top.dt
dt[#dt+1] = toclose
- dt[0] = top
+ -- dt[0] = top -- nasty circular reference when serializing table
if toclose.at.xmlns then
remove(xmlns)
end
end
-local function add_empty(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = stack[#stack]
- dt = top.dt
- local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
- dt[#dt+1] = t
- setmetatable(t, mt)
- if at.xmlns then
- remove(xmlns)
- end
- at = { }
-end
-
local function add_text(text)
if cleanup and #text > 0 then
dt[#dt+1] = cleanup(text)
@@ -3104,7 +4136,7 @@ local function add_special(what, spacing, text)
if strip and (what == "@cm@" or what == "@dt@") then
-- forget it
else
- dt[#dt+1] = { special=true, ns="", tg=what, dt={text} }
+ dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } }
end
end
@@ -3112,7 +4144,199 @@ local function set_message(txt)
errorstr = "garbage at the end of the file: " .. gsub(txt,"([ \n\r\t]*)","")
end
-local P, S, R, C, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V
+local reported_attribute_errors = { }
+
+local function attribute_value_error(str)
+ if not reported_attribute_errors[str] then
+ logs.report("xml","invalid attribute value: %q",str)
+ reported_attribute_errors[str] = true
+ at._error_ = str
+ end
+ return str
+end
+local function attribute_specification_error(str)
+ if not reported_attribute_errors[str] then
+ logs.report("xml","invalid attribute specification: %q",str)
+ reported_attribute_errors[str] = true
+ at._error_ = str
+ end
+ return str
+end
+
+function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or format("&%s;",str) end
+function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end
+function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end
+
+local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return format("h:%s",s), true
+ end
+end
+
+local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return format("d:%s",s), true
+ end
+end
+
+-- one level expansion (simple case), no checking done
+
+local rest = (1-P(";"))^0
+local many = P(1)^0
+
+local parsedentity =
+ P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+
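-- a small sketch of this one level expansion (later exposed as xml.parsedentitylpeg):
--
--~ print(lpegmatch(parsedentity,"&#x41;")) -- A   (full &...; form, hexadecimal)
--~ print(lpegmatch(parsedentity,"&#65;"))  -- A   (full form, decimal)
--~ print(lpegmatch(parsedentity,"#x41"))   -- A   (bare form, as stored in entity tables)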
+-- parsing in the xml file
+
+local predefined_unified = {
+ [38] = "&amp;",
+ [42] = "&quot;",
+ [47] = "&apos;",
+ [74] = "&lt;",
+    [76] = "&gt;",
+}
+
+local predefined_simplified = {
+ [38] = "&", amp = "&",
+ [42] = '"', quot = '"',
+ [47] = "'", apos = "'",
+ [74] = "<", lt = "<",
+ [76] = ">", gt = ">",
+}
+
+local function handle_hex_entity(str)
+ local h = hcache[str]
+ if not h then
+ local n = tonumber(str,16)
+ h = unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ end
+ elseif utfize then
+ h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
+ if not n then
+ logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ end
+ else
+ if trace_entities then
+ logs.report("xml","found entity &#x%s;",str)
+ end
+ h = "&#x" .. str .. ";"
+ end
+ hcache[str] = h
+ end
+ return h
+end
+
+local function handle_dec_entity(str)
+ local d = dcache[str]
+ if not d then
+ local n = tonumber(str)
+ d = unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ end
+ elseif utfize then
+ d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
+ if not n then
+ logs.report("xml","utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+                logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ end
+ else
+ if trace_entities then
+ logs.report("xml","found entity &#%s;",str)
+ end
+ d = "&#" .. str .. ";"
+ end
+ dcache[str] = d
+ end
+ return d
+end
+
+xml.parsedentitylpeg = parsedentity
+
+local function handle_any_entity(str)
+ if resolve then
+ local a = acache[str] -- per instance ! todo
+ if not a then
+ a = resolve_predefined and predefined_simplified[str]
+ if a then
+ -- one of the predefined
+ elseif type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
+ end
+ if a then
+ if trace_entities then
+ logs.report("xml","resolved entity &%s; -> %s (internal)",str,a)
+ end
+ a = lpegmatch(parsedentity,a) or a
+ else
+ if xml.unknown_any_entity_format then
+ a = xml.unknown_any_entity_format(str) or ""
+ end
+ if a then
+ if trace_entities then
+ logs.report("xml","resolved entity &%s; -> %s (external)",str,a)
+ end
+ else
+ if trace_entities then
+ logs.report("xml","keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
+ end
+ end
+ acache[str] = a
+ elseif trace_entities then
+ if not acache[str] then
+ logs.report("xml","converting entity &%s; into %s",str,a)
+ acache[str] = a
+ end
+ end
+ return a
+ else
+ local a = acache[str]
+ if not a then
+ if trace_entities then
+ logs.report("xml","found entity &%s;",str)
+ end
+ a = resolve_predefined and predefined_simplified[str]
+ if a then
+ -- one of the predefined
+ acache[str] = a
+ elseif str == "" then
+ a = "&error;"
+ acache[str] = a
+ else
+ a = "&" .. str .. ";"
+ acache[str] = a
+ end
+ end
+ return a
+ end
+end
+
+local function handle_end_entity(chr)
+ logs.report("xml","error in entity, %q found instead of ';'",chr)
+end
local space = S(' \r\n\t')
local open = P('<')
@@ -3122,24 +4346,50 @@ local dquote = S('"')
local equal = P('=')
local slash = P('/')
local colon = P(':')
+local semicolon = P(';')
+local ampersand = P('&')
local valid = R('az', 'AZ', '09') + S('_-.')
local name_yes = C(valid^1) * colon * C(valid^1)
local name_nop = C(P(true)) * C(valid^1)
local name = name_yes + name_nop
+local utfbom = lpeg.patterns.utfbom -- no capture
+local spacing = C(space^0)
-local utfbom = P('\000\000\254\255') + P('\255\254\000\000') +
- P('\255\254') + P('\254\255') + P('\239\187\191') -- no capture
+----- entitycontent = (1-open-semicolon)^0
+local anyentitycontent = (1-open-semicolon-space-close)^0
+local hexentitycontent = R("AF","af","09")^0
+local decentitycontent = R("09")^0
+local parsedentity = P("#")/"" * (
+ P("x")/"" * (hexentitycontent/handle_hex_entity) +
+ (decentitycontent/handle_dec_entity)
+ ) + (anyentitycontent/handle_any_entity)
+local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
+
+local text_unparsed = C((1-open)^1)
+local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
-local spacing = C(space^0)
-local justtext = C((1-open)^1)
local somespace = space^1
local optionalspace = space^0
-local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote)
-local attribute = (somespace * name * optionalspace * equal * optionalspace * value) / add_attribute
-local attributes = attribute^0
+----- value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
+local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
+
+local endofattributes = slash * close + close -- recovery of flaky html
+local whatever = space * name * optionalspace * equal
+local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
+----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
+local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-local text = justtext / add_text
+local attributevalue = value + wrongvalue
+
+local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
+----- attributes = (attribute)^0
+
+local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
+
+local parsedtext = text_parsed / add_text
+local unparsedtext = text_unparsed / add_text
local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
@@ -3157,19 +4407,27 @@ local someinstruction = C((1 - endinstruction)^0)
local somecomment = C((1 - endcomment )^0)
local somecdata = C((1 - endcdata )^0)
-local function entity(k,v) entities[k] = v end
+local function normalentity(k,v ) entities[k] = v end
+local function systementity(k,v,n) entities[k] = v end
+local function publicentity(k,v,n) entities[k] = v end
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
local endset = P("]")
-local doctypename = C((1-somespace)^0)
+local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
-local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (doctypename * somespace * value)/entity * optionalspace * close
-local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace
-local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace
-local definitiondoctype= doctypename * somespace * beginset * P(elementdoctype + entitydoctype)^0 * optionalspace * endset
-local simpledoctype = (1-close)^1 -- * balanced^0
+
+local normalentitytype = (doctypename * somespace * value)/normalentity
+local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
+local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
+local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+local definitiondoctype= doctypename * somespace * doctypeset
+local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
+local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
+local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
@@ -3179,60 +4437,116 @@ local doctype = (spacing * begindoctype * somedoctype * enddoct
-- nicer but slower:
--
--- local instruction = (lpeg.Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
--- local comment = (lpeg.Cc("@cm@") * spacing * begincomment * somecomment * endcomment ) / add_special
--- local cdata = (lpeg.Cc("@cd@") * spacing * begincdata * somecdata * endcdata ) / add_special
--- local doctype = (lpeg.Cc("@dt@") * spacing * begindoctype * somedoctype * enddoctype ) / add_special
+-- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
+-- local comment = (Cc("@cm@") * spacing * begincomment * somecomment * endcomment ) / add_special
+-- local cdata = (Cc("@cd@") * spacing * begincdata * somecdata * endcdata ) / add_special
+-- local doctype = (Cc("@dt@") * spacing * begindoctype * somedoctype * enddoctype ) / add_special
-local trailer = space^0 * (justtext/set_message)^0
+local trailer = space^0 * (text_unparsed/set_message)^0
-- comment + emptyelement + text + cdata + instruction + V("parent"), -- 6.5 seconds on 40 MB database file
-- text + comment + emptyelement + cdata + instruction + V("parent"), -- 5.8
-- text + V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-local grammar = P { "preamble",
+local grammar_parsed_text = P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
parent = beginelement * V("children")^0 * endelement,
- children = text + V("parent") + emptyelement + comment + cdata + instruction,
+ children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- todo: xml.new + properties like entities and strip and such (store in root)
+local grammar_unparsed_text = P { "preamble",
+ preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
+ parent = beginelement * V("children")^0 * endelement,
+ children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+}
-function xml.convert(data, no_root, strip_cm_and_dt, given_entities) -- maybe use table met k/v (given_entities may disapear)
- strip = strip_cm_and_dt or xml.strip_cm_and_dt
- stack, top, at, xmlns, errorstr, result, entities = {}, {}, {}, {}, nil, nil, given_entities or {}
+-- maybe we will add settings to result as well
+
+local function xmlconvert(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
+ resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
+ acache, hcache, dcache = { }, { }, { } -- not stored
+ reported_attribute_errors = { }
+ if settings.parent_root then
+ mt = getmetatable(settings.parent_root)
+ else
+ initialize_mt(top)
+ end
stack[#stack+1] = top
top.dt = { }
dt = top.dt
if not data or data == "" then
errorstr = "empty xml file"
- elseif not grammar:match(data) then
- errorstr = "invalid xml file"
+ elseif utfize or resolve then
+ if lpegmatch(grammar_parsed_text,data) then
+ errorstr = ""
+ else
+ errorstr = "invalid xml file - parsed text"
+ end
+ elseif type(data) == "string" then
+ if lpegmatch(grammar_unparsed_text,data) then
+ errorstr = ""
+ else
+ errorstr = "invalid xml file - unparsed text"
+ end
else
- errorstr = ""
+ errorstr = "invalid xml file - no text at all"
end
if errorstr and errorstr ~= "" then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={}, er = true } }, error = true }
+ result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
setmetatable(stack, mt)
- if xml.error_handler then xml.error_handler("load",errorstr) end
+ local error_handler = settings.error_handler
+ if error_handler == false then
+ -- no error message
+ else
+ error_handler = error_handler or xml.error_handler
+ if error_handler then
+ xml.error_handler("load",errorstr)
+ end
+ end
else
result = stack[1]
end
- if not no_root then
- result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={}, entities = entities }
+ if not settings.no_root then
+ result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings }
setmetatable(result, mt)
local rdt = result.dt
for k=1,#rdt do
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
+v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
end
+ if errorstr and errorstr ~= "" then
+ result.error = true
+ end
return result
end
+xml.convert = xmlconvert
+
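-- a hedged usage sketch of the new settings table (the keys are the ones read
-- at the top of xmlconvert; "myentity" is just an illustration):
--
--~ local root = xml.convert("<doc>&#x41; and &myentity;</doc>", {
--~     utfize_entities  = true,                    -- numeric entities become utf characters
--~     resolve_entities = true,                    -- named entities are looked up
--~     entities         = { myentity = "value" },  -- presumably yields the text "A and value"
--~ })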
+function xml.inheritedconvert(data,xmldata)
+ local settings = xmldata.settings
+ settings.parent_root = xmldata -- to be tested
+ -- settings.no_root = true
+ local xc = xmlconvert(data,settings)
+ -- xc.settings = nil
+ -- xc.entities = nil
+ -- xc.special = nil
+ -- xc.ri = nil
+ -- print(xc.tg)
+ return xc
+end
+
--[[ldx--
<p>Packaging data in an xml like table is done with the following
function. Maybe it will go away (when not used).</p>
@@ -3243,7 +4557,7 @@ function xml.is_valid(root)
end
function xml.package(tag,attributes,data)
- local ns, tg = tag:match("^(.-):?([^:]+)$")
+ local ns, tg = match(tag,"^(.-):?([^:]+)$")
local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
setmetatable(t, mt)
return t
@@ -3261,21 +4575,19 @@ the whole file first. The function accepts a string representing
a filename or a file handle.</p>
--ldx]]--
-function xml.load(filename)
+function xml.load(filename,settings)
+ local data = ""
if type(filename) == "string" then
+ -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
local f = io.open(filename,'r')
if f then
- local root = xml.convert(f:read("*all"))
+ data = f:read("*all")
f:close()
- return root
- else
- return xml.convert("")
end
elseif filename then -- filehandle
- return xml.convert(filename:read("*all"))
- else
- return xml.convert("")
+ data = filename:read("*all")
end
+ return xmlconvert(data,settings)
end
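-- a usage sketch ("somefile.xml" is just a placeholder); the second argument is
-- the same settings table that xmlconvert takes
--
--~ local root = xml.load("somefile.xml")
--~ local root = xml.load(io.open("somefile.xml"), { resolve_entities = true })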
--[[ldx--
@@ -3283,9 +4595,11 @@ end
valid trees, which is what the next function does.</p>
--ldx]]--
+local no_root = { no_root = true }
+
function xml.toxml(data)
if type(data) == "string" then
- local root = { xml.convert(data,true) }
+ local root = { xmlconvert(data,no_root) }
return (#root > 1 and root) or root[1]
else
return data
@@ -3305,7 +4619,7 @@ local function copy(old,tables)
if not tables[old] then
tables[old] = new
end
- for k,v in pairs(old) do
+ for k,v in next, old do
new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
end
local mt = getmetatable(old)
@@ -3330,217 +4644,304 @@ alternative.</p>
-- todo: add <?xml version='1.0' standalone='yes'?> when not present
-local fallbackhandle = (tex and tex.sprint) or io.write
-
-local function serialize(e, handle, textconverter, attributeconverter, specialconverter, nocommands)
- if not e then
- return
- elseif not nocommands then
- local ec = e.command
- if ec ~= nil then -- we can have all kind of types
- if e.special then
- local etg, edt = e.tg, e.dt
- local spc = specialconverter and specialconverter[etg]
- if spc then
- local result = spc(edt[1])
- if result then
- handle(result)
- return
- else
- -- no need to handle any further
- end
- end
- end
- local xc = xml.command
- if xc then
- xc(e,ec)
- return
+function xml.checkbom(root) -- can be made faster
+ if root.ri then
+ local dt, found = root.dt, false
+ for k=1,#dt do
+ local v = dt[k]
+ if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
+ found = true
+ break
end
end
+ if not found then
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
+ end
end
- handle = handle or fallbackhandle
- local etg = e.tg
- if etg then
- if e.special then
- local edt = e.dt
- local spc = specialconverter and specialconverter[etg]
- if spc then
- local result = spc(edt[1])
- if result then
- handle(result)
+end
+
+--[[ldx--
+<p>At the cost of some 25% runtime overhead you can first convert the tree to a string
+and then handle the lot.</p>
+--ldx]]--
+
+-- new experimental reorganized serialize
+
+local function verbose_element(e,handlers)
+ local handle = handlers.handle
+ local serialize = handlers.serialize
+ local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
+ local ats = eat and next(eat) and { }
+ if ats then
+ for k,v in next, eat do
+ ats[#ats+1] = format('%s=%q',k,v)
+ end
+ end
+ if ern and trace_remap and ern ~= ens then
+ ens = ern
+ end
+ if ens ~= "" then
+ if edt and #edt > 0 then
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),">")
+ else
+ handle("<",ens,":",etg,">")
+ end
+ for i=1,#edt do
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(e)
else
- -- no need to handle any further
- end
- elseif etg == "@pi@" then
- -- handle(format("<?%s?>",edt[1]))
- handle("<?" .. edt[1] .. "?>")
- elseif etg == "@cm@" then
- -- handle(format("<!--%s-->",edt[1]))
- handle("<!--" .. edt[1] .. "-->")
- elseif etg == "@cd@" then
- -- handle(format("<![CDATA[%s]]>",edt[1]))
- handle("<![CDATA[" .. edt[1] .. "]]>")
- elseif etg == "@dt@" then
- -- handle(format("<!DOCTYPE %s>",edt[1]))
- handle("<!DOCTYPE " .. edt[1] .. ">")
- elseif etg == "@rt@" then
- serialize(edt,handle,textconverter,attributeconverter,specialconverter,nocommands)
+ serialize(e,handlers)
+ end
end
+ handle("</",ens,":",etg,">")
else
- local ens, eat, edt, ern = e.ns, e.at, e.dt, e.rn
- local ats = eat and next(eat) and { } -- type test maybe faster
if ats then
- if attributeconverter then
- for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,attributeconverter(v))
- end
- else
- for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
- end
- end
+ handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",ens,":",etg,"/>")
end
- if ern and trace_remap and ern ~= ens then
- ens = ern
+ end
+ else
+ if edt and #edt > 0 then
+ if ats then
+ handle("<",etg," ",concat(ats," "),">")
+ else
+ handle("<",etg,">")
end
- if ens ~= "" then
- if edt and #edt > 0 then
- if ats then
- -- handle(format("<%s:%s %s>",ens,etg,concat(ats," ")))
- handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. ">")
- else
- -- handle(format("<%s:%s>",ens,etg))
- handle("<" .. ens .. ":" .. etg .. ">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- if textconverter then
- handle(textconverter(e))
- else
- handle(e)
- end
- else
- serialize(e,handle,textconverter,attributeconverter,specialconverter,nocommands)
- end
- end
- -- handle(format("</%s:%s>",ens,etg))
- handle("</" .. ens .. ":" .. etg .. ">")
+ for i=1,#edt do
+ local ei = edt[i]
+ if type(ei) == "string" then
+ handle(ei)
else
- if ats then
- -- handle(format("<%s:%s %s/>",ens,etg,concat(ats," ")))
- handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. "/>")
- else
- -- handle(format("<%s:%s/>",ens,etg))
- handle("<" .. ens .. ":" .. etg .. "/>")
- end
+ serialize(ei,handlers)
end
+ end
+ handle("</",etg,">")
+ else
+ if ats then
+ handle("<",etg," ",concat(ats," "),"/>")
else
- if edt and #edt > 0 then
- if ats then
- -- handle(format("<%s %s>",etg,concat(ats," ")))
- handle("<" .. etg .. " " .. concat(ats," ") .. ">")
- else
- -- handle(format("<%s>",etg))
- handle("<" .. etg .. ">")
- end
- for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- if textconverter then
- handle(textconverter(ei))
- else
- handle(ei)
- end
- else
- serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands)
- end
- end
- -- handle(format("</%s>",etg))
- handle("</" .. etg .. ">")
- else
- if ats then
- -- handle(format("<%s %s/>",etg,concat(ats," ")))
- handle("<" .. etg .. " " .. concat(ats," ") .. "/>")
- else
- -- handle(format("<%s/>",etg))
- handle("<" .. etg .. "/>")
- end
- end
+ handle("<",etg,"/>")
end
end
- elseif type(e) == "string" then
- if textconverter then
- handle(textconverter(e))
+ end
+end
+
+local function verbose_pi(e,handlers)
+ handlers.handle("<?",e.dt[1],"?>")
+end
+
+local function verbose_comment(e,handlers)
+ handlers.handle("<!--",e.dt[1],"-->")
+end
+
+local function verbose_cdata(e,handlers)
+ handlers.handle("<![CDATA[", e.dt[1],"]]>")
+end
+
+local function verbose_doctype(e,handlers)
+ handlers.handle("<!DOCTYPE ",e.dt[1],">")
+end
+
+local function verbose_root(e,handlers)
+ handlers.serialize(e.dt,handlers)
+end
+
+local function verbose_text(e,handlers)
+ handlers.handle(e)
+end
+
+local function verbose_document(e,handlers)
+ local serialize = handlers.serialize
+ local functions = handlers.functions
+ for i=1,#e do
+ local ei = e[i]
+ if type(ei) == "string" then
+ functions["@tx@"](ei,handlers)
else
- handle(e)
+ serialize(ei,handlers)
end
- else
- for i=1,#e do
- local ei = e[i]
- if type(ei) == "string" then
- if textconverter then
- handle(textconverter(ei))
- else
- handle(ei)
- end
- else
- serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands)
- end
+ end
+end
+
+local function serialize(e,handlers,...)
+ local initialize = handlers.initialize
+ local finalize = handlers.finalize
+ local functions = handlers.functions
+ if initialize then
+ local state = initialize(...)
+ if not state == true then
+ return state
end
end
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
end
-xml.serialize = serialize
+local function xserialize(e,handlers)
+ local functions = handlers.functions
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+end
-function xml.checkbom(root) -- can be made faster
- if root.ri then
- local dt, found = root.dt, false
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi" and find(v.dt,"xml.*version=") then
- found = true
- break
+local handlers = { }
+
+local function newhandlers(settings)
+ local t = table.copy(handlers.verbose or { }) -- merge
+ if settings then
+ for k,v in next, settings do
+ if type(v) == "table" then
+                local tk = t[k] if not tk then tk = { } t[k] = tk end
+ for kk,vv in next, v do
+ tk[kk] = vv
+ end
+ else
+ t[k] = v
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
+ if settings.name then
+ handlers[settings.name] = t
end
end
+ return t
end
+local nofunction = function() end
+
+function xml.sethandlersfunction(handler,name,fnc)
+ handler.functions[name] = fnc or nofunction
+end
+
+function xml.gethandlersfunction(handler,name)
+ return handler.functions[name]
+end
+
+function xml.gethandlers(name)
+ return handlers[name]
+end
+
+newhandlers {
+ name = "verbose",
+ initialize = false, -- faster than nil and mt lookup
+ finalize = false, -- faster than nil and mt lookup
+ serialize = xserialize,
+ handle = print,
+ functions = {
+ ["@dc@"] = verbose_document,
+ ["@dt@"] = verbose_doctype,
+ ["@rt@"] = verbose_root,
+ ["@el@"] = verbose_element,
+ ["@pi@"] = verbose_pi,
+ ["@cm@"] = verbose_comment,
+ ["@cd@"] = verbose_cdata,
+ ["@tx@"] = verbose_text,
+ }
+}
+
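-- a hedged sketch: a handler is a plain table of functions keyed on the special
-- tags, so individual steps can be swapped out; for instance dropping comments
-- while dumping:
--
--~ local verbose = xml.gethandlers("verbose")
--~ xml.sethandlersfunction(verbose,"@cm@",function(e,handlers) end) -- ignore comment nodes
--~ xml.serialize(root,verbose) -- the default handle is print, so this is a rough trace dump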
--[[ldx--
-<p>At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.</p>
+<p>How you deal with saving data depends on your preferences. For a 40 MB database
+file the timings on a 2.3 Core Duo are as follows (time in seconds):</p>
+
+<lines>
+1.3 : load data from file to string
+6.1 : convert string into tree
+5.3 : saving in file using xmlsave
+6.8 : converting to string using xml.tostring
+3.6 : saving converted string in file
+</lines>
+
+<p>Beware, these timings were made with the old routine, but the measurements will
+not be that much different I guess.</p>
--ldx]]--
-function xml.tostring(root) -- 25% overhead due to collecting
+-- maybe this will move to lxml-xml
+
+local result
+
+local xmlfilehandler = newhandlers {
+ name = "file",
+ initialize = function(name) result = io.open(name,"wb") return result end,
+ finalize = function() result:close() return true end,
+ handle = function(...) result:write(...) end,
+}
+
+-- no checking on writeability here but not faster either
+--
+-- local xmlfilehandler = newhandlers {
+-- initialize = function(name) io.output(name,"wb") return true end,
+-- finalize = function() io.close() return true end,
+-- handle = io.write,
+-- }
+
+
+function xml.save(root,name)
+ serialize(root,xmlfilehandler,name)
+end
+
+local result
+
+local xmlstringhandler = newhandlers {
+ name = "string",
+ initialize = function() result = { } return result end,
+ finalize = function() return concat(result) end,
+ handle = function(...) result[#result+1] = concat { ... } end
+}
+
+local function xmltostring(root) -- 25% overhead due to collecting
if root then
if type(root) == 'string' then
return root
- elseif next(root) then -- next is faster than type (and >0 test)
- local result = { }
- serialize(root,function(s) result[#result+1] = s end) -- brrr, slow (direct printing is faster)
- return concat(result,"")
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
end
return ""
end
+local function xmltext(root) -- inline
+ return (root and xmltostring(root)) or ""
+end
+
+function initialize_mt(root)
+ mt = { __tostring = xmltext, __index = root }
+end
+
+xml.defaulthandlers = handlers
+xml.newhandlers = newhandlers
+xml.serialize = serialize
+xml.tostring = xmltostring
+
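-- typical use of the two handlers defined above ("output.xml" is a placeholder):
--
--~ xml.save(root,"output.xml")  -- streams through the "file" handler
--~ local s = xml.tostring(root) -- collects through the "string" handler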
--[[ldx--
<p>The next function operated on the content only and needs a handle function
that accepts a string.</p>
--ldx]]--
-function xml.string(e,handle)
+local function xmlstring(e,handle)
if not handle or (e.special and e.tg ~= "@rt@") then
-- nothing
elseif e.tg then
local edt = e.dt
if edt then
for i=1,#edt do
- xml.string(edt[i],handle)
+ xmlstring(edt[i],handle)
end
end
else
@@ -3548,54 +4949,52 @@ function xml.string(e,handle)
end
end
---[[ldx--
-<p>How you deal with saving data depends on your preferences. For a 40 MB database
-file the timing on a 2.3 Core Duo are as follows (time in seconds):</p>
+xml.string = xmlstring
-<lines>
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-</lines>
-
-<p>The save function is given below.</p>
+--[[ldx--
+<p>A few helpers:</p>
--ldx]]--
-function xml.save(root,name)
- local f = io.open(name,"w")
- if f then
- xml.serialize(root,function(s) f:write(s) end)
- f:close()
+--~ xmlsetproperty(root,"settings",settings)
+
+function xml.settings(e)
+ while e do
+ local s = e.settings
+ if s then
+ return s
+ else
+ e = e.__p__
+ end
end
+ return nil
end
---[[ldx--
-<p>A few helpers:</p>
---ldx]]--
-
-function xml.body(root)
- return (root.ri and root.dt[root.ri]) or root
+function xml.root(e)
+ local r = e
+ while e do
+ e = e.__p__
+ if e then
+ r = e
+ end
+ end
+ return r
end
-function xml.text(root)
- return (root and xml.tostring(root)) or ""
+function xml.parent(root)
+ return root.__p__
end
-function xml.content(root) -- bugged
- return (root and root.dt and xml.tostring(root.dt)) or ""
+function xml.body(root)
+ return (root.ri and root.dt[root.ri]) or root -- not ok yet
end
-function xml.isempty(root, pattern)
- if pattern == "" or pattern == "*" then
- pattern = nil
- end
- if pattern then
- -- todo
- return false
+function xml.name(root)
+ if not root then
+ return ""
+ elseif root.ns == "" then
+ return root.tg
else
- return not root or not root.dt or #root.dt == 0 or root.dt == ""
+ return root.ns .. ":" .. root.tg
end
end
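-- small sketches of the helpers above:
--
--~ xml.name(e)   -- "ns:tag", or just "tag" when there is no namespace
--~ xml.root(e)   -- follows __p__ upwards and returns the outermost node
--~ xml.parent(e) -- e.__p__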
@@ -3603,18 +5002,15 @@ end
<p>The next helper erases an element but keeps the table as it is,
and since empty strings are not serialized (effectively) it does
not harm. Copying the table would take more time. Usage:</p>
-
-<typing>
-dt[k] = xml.empty() or xml.empty(dt,k)
-</typing>
--ldx]]--
-function xml.empty(dt,k)
- if dt and k then
- dt[k] = ""
- return dt[k]
- else
- return ""
+function xml.erase(dt,k)
+ if dt then
+ if k then
+ dt[k] = ""
+ else for k=1,#dt do
+            dt[k] = { "" }
+ end end
end
end
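-- a usage sketch of the renamed helper (it now blanks entries in place instead
-- of returning an empty value like the old xml.empty did):
--
--~ xml.erase(e.dt)   -- blank all entries
--~ xml.erase(e.dt,3) -- blank only the third entry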
@@ -3635,6 +5031,42 @@ function xml.assign(dt,k,root)
end
end
+-- the following helpers may move
+
+--[[ldx--
+<p>The next helper assigns a tree (or string). Usage:</p>
+<typing>
+xml.tocdata(e)
+xml.tocdata(e,"error")
+</typing>
+--ldx]]--
+
+function xml.tocdata(e,wrapper)
+ local whatever = xmltostring(e.dt)
+ if wrapper then
+ whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
+ end
+ local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
+ setmetatable(t,getmetatable(e))
+ e.dt = { t }
+end
+
+function xml.makestandalone(root)
+ if root.ri then
+ local dt = root.dt
+ for k=1,#dt do
+ local v = dt[k]
+ if type(v) == "table" and v.special and v.tg == "@pi@" then
+ local txt = v.dt[1]
+ if find(txt,"xml.*version=") then
+ v.dt[1] = txt .. " standalone='yes'"
+ break
+ end
+ end
+ end
+ end
+end
+
end -- of closure
@@ -3648,1420 +5080,1285 @@ if not modules then modules = { } end modules ['lxml-pth'] = {
license = "see context related readme files"
}
+-- e.ni is only valid after a filter run
+
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, lower, gmatch, gsub, find = string.format, string.lower, string.gmatch, string.gsub, string.find
+local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
+local lpegmatch = lpeg.match
+
+-- beware, this is not xpath ... e.g. position is different (currently) and
+-- we have reverse-sibling as reversed preceding sibling
--[[ldx--
<p>This module can be used stand alone but also inside <l n='mkiv'/> in
which case it hooks into the tracker code. Therefore we provide a few
functions that set the tracers. Here we overload a previously defined
function.</p>
+<p>If I can get in the mood I will make a variant that is XSLT compliant
+but I wonder if it makes sense.</p>
--ldx]]--
-local trace_lpath = false
-
-if trackers then
- trackers.register("xml.lpath", function(v) trace_lpath = v end)
-end
+--[[ldx--
+<p>Expecially the lpath code is experimental, we will support some of xpath, but
+only things that make sense for us; as compensation it is possible to hook in your
+own functions. Apart from preprocessing content for <l n='context'/> we also need
+this module for process management, like handling <l n='ctx'/> and <l n='rlx'/>
+files.</p>
-local settrace = xml.settrace -- lxml-tab
+<typing>
+a/b/c /*/c
+a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
+a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
+</typing>
+--ldx]]--
-function xml.settrace(str,value)
- if str == "lpath" then
- trace_lpath = value or false
- else
- settrace(str,value) -- lxml-tab
- end
-end
+local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
+local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
+local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
--[[ldx--
-<p>We've now arrived at an intersting part: accessing the tree using a subset
+<p>We've now arrived at an interesting part: accessing the tree using a subset
of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
will explain more about its usage in other documents.</p>
--ldx]]--
-local lpathcalls = 0 -- statistics
-local lpathcached = 0 -- statistics
+local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end
+local lpathcached = 0 function xml.lpathcached() return lpathcached end
-xml.functions = xml.functions or { }
-xml.expressions = xml.expressions or { }
+xml.functions = xml.functions or { } -- internal
+xml.expressions = xml.expressions or { } -- in expressions
+xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
+xml.specialhandler = xml.specialhandler or { }
local functions = xml.functions
local expressions = xml.expressions
+local finalizers = xml.finalizers
-local actions = {
- [10] = "stay",
- [11] = "parent",
- [12] = "subtree root",
- [13] = "document root",
- [14] = "any",
- [15] = "many",
- [16] = "initial",
- [20] = "match",
- [21] = "match one of",
- [22] = "match and attribute eq",
- [23] = "match and attribute ne",
- [24] = "match one of and attribute eq",
- [25] = "match one of and attribute ne",
- [27] = "has attribute",
- [28] = "has value",
- [29] = "fast match",
- [30] = "select",
- [31] = "expression",
- [40] = "processing instruction",
-}
-
--- a rather dumb lpeg
+finalizers.xml = finalizers.xml or { }
+finalizers.tex = finalizers.tex or { }
-local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc
+local function fallback (t, name)
+ local fn = finalizers[name]
+ if fn then
+ t[name] = fn
+ else
+ logs.report("xml","unknown sub finalizer '%s'",tostring(name))
+ fn = function() end
+ end
+ return fn
+end
--- instead of using functions we just parse a few names which saves a call
--- later on
+setmetatable(finalizers.xml, { __index = fallback })
+setmetatable(finalizers.tex, { __index = fallback })
-local lp_position = P("position()") / "ps"
-local lp_index = P("index()") / "id"
-local lp_text = P("text()") / "tx"
-local lp_name = P("name()") / "(ns~='' and ns..':'..tg)" -- "((rt.ns~='' and rt.ns..':'..rt.tg) or '')"
-local lp_tag = P("tag()") / "tg" -- (rt.tg or '')
-local lp_ns = P("ns()") / "ns" -- (rt.ns or '')
-local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
-local lp_doequal = P("=") / "=="
-local lp_attribute = P("@") / "" * Cc("(at['") * R("az","AZ","--","__")^1 * Cc("'] or '')")
+xml.defaultprotocol = "xml"
-local lp_lua_function = C(R("az","AZ","--","__")^1 * (P(".") * R("az","AZ","--","__")^1)^1) * P("(") / function(t) -- todo: better . handling
- return t .. "("
-end
+-- as xsl does not follow xpath completely, here we will also
+-- be more liberal, especially with regard to the use of | and
+-- the rootpath:
+--
+-- test : all 'test' under current
+-- /test : 'test' relative to current
+-- a|b|c : set of names
+-- (a|b|c) : idem
+-- ! : not
+--
+-- after all, we're not doing transformations but filtering. in
+-- addition we provide filter functions (last bit)
+--
+-- todo: optimizer
+--
+-- .. : parent
+-- * : all kids
+-- / : anchor here
+-- // : /**/
+-- ** : all in between
+--
+-- so far we had (more practical as we don't transform)
+--
+-- {/test} : kids 'test' under current node
+-- {test} : any kid with tag 'test'
+-- {//test} : same as above
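--[[ldx--
<p>Editor's sketch, not part of the patch: a few of the patterns described above in
action, using the <t>xml.selection</t> helper that is set up near the end of this
module (the document is made up for the example):</p>

<typing>
local root = xml.convert("<a><b><c n='1'/><c n='2'/></b></a>")

xml.selection(root,"a/b/c",function(e)
    print(e.tg,e.at.n) -- c 1 and c 2
end)

xml.selection(root,"c[@n='2']",function(e)
    print(e.at.n) -- 2
end)
</typing>
--ldx]]--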
-local lp_function = C(R("az","AZ","--","__")^1) * P("(") / function(t) -- todo: better . handling
- if expressions[t] then
- return "expressions." .. t .. "("
- else
- return "expressions.error("
- end
-end
+-- evaluator (needs to be redone, for the moment copied)
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
-local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+-- todo: apply_axis(list,notable) and collection vs single
--- if we use a dedicated namespace then we don't need to pass rt and k
+local apply_axis = { }
-local lp_special = (C(P("name")+P("text")+P("tag"))) * value / function(t,s)
- if expressions[t] then
- if s then
- return "expressions." .. t .. "(r,k," .. s ..")"
- else
- return "expressions." .. t .. "(r,k)"
+apply_axis['root'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ local rt = ll
+ while ll do
+ ll = ll.__p__
+ if ll then
+ rt = ll
+ end
end
- else
- return "expressions.error(" .. t .. ")"
+ collected[#collected+1] = rt
end
+ return collected
end
-local converter = lpeg.Cs ( (
- lp_position +
- lp_index +
- lp_text + lp_name + -- fast one
- lp_special +
- lp_noequal + lp_doequal +
- lp_attribute +
- lp_lua_function +
- lp_function +
-1 )^1 )
-
--- expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1
+apply_axis['self'] = function(list)
+--~ local collected = { }
+--~ for l=1,#list do
+--~ collected[#collected+1] = list[l]
+--~ end
+--~ return collected
+ return list
+end
-local template = [[
- return function(expressions,r,d,k,e,dt,ns,tg,id,ps)
- local at, tx = e.at or { }, dt[1] or ""
- return %s
+apply_axis['child'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ local dt = ll.dt
+ local en = 0
+ for k=1,#dt do
+ local dk = dt[k]
+ if dk.tg then
+ collected[#collected+1] = dk
+ dk.ni = k -- refresh
+ en = en + 1
+ dk.ei = en
+ end
+ end
+ ll.en = en
end
-]]
-
-local function make_expression(str)
- str = converter:match(str)
- return str, loadstring(format(template,str))()
-end
-
-local map = { }
-
-local space = S(' \r\n\t')
-local squote = S("'")
-local dquote = S('"')
-local lparent = P('(')
-local rparent = P(')')
-local atsign = P('@')
-local lbracket = P('[')
-local rbracket = P(']')
-local exclam = P('!')
-local period = P('.')
-local eq = P('==') + P('=')
-local ne = P('<>') + P('!=')
-local star = P('*')
-local slash = P('/')
-local colon = P(':')
-local bar = P('|')
-local hat = P('^')
-local valid = R('az', 'AZ', '09') + S('_-')
-local name_yes = C(valid^1 + star) * colon * C(valid^1 + star) -- permits ns:* *:tg *:*
-local name_nop = Cc("*") * C(valid^1)
-local name = name_yes + name_nop
-local number = C((S('+-')^0 * R('09')^1)) / tonumber
-local names = (bar^0 * name)^1
-local morenames = name * (bar^0 * name)^1
-local instructiontag = P('pi::')
-local spacing = C(space^0)
-local somespace = space^1
-local optionalspace = space^0
-local text = C(valid^0)
-local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote)
-local empty = 1-slash
-
-local is_eq = lbracket * atsign * name * eq * value * rbracket
-local is_ne = lbracket * atsign * name * ne * value * rbracket
-local is_attribute = lbracket * atsign * name * rbracket
-local is_value = lbracket * value * rbracket
-local is_number = lbracket * number * rbracket
-
-local nobracket = 1-(lbracket+rbracket) -- must be improved
-local is_expression = lbracket * C(((C(nobracket^1))/make_expression)) * rbracket
-
-local is_expression = lbracket * (C(nobracket^1))/make_expression * rbracket
-
-local is_one = name
-local is_none = exclam * name
-local is_one_of = ((lparent * names * rparent) + morenames)
-local is_none_of = exclam * ((lparent * names * rparent) + morenames)
-
-local stay = (period )
-local parent = (period * period ) / function( ) map[#map+1] = { 11 } end
-local subtreeroot = (slash + hat ) / function( ) map[#map+1] = { 12 } end
-local documentroot = (hat * hat ) / function( ) map[#map+1] = { 13 } end
-local any = (star ) / function( ) map[#map+1] = { 14 } end
-local many = (star * star ) / function( ) map[#map+1] = { 15 } end
-local initial = (hat * hat * hat ) / function( ) map[#map+1] = { 16 } end
-
-local match = (is_one ) / function(...) map[#map+1] = { 20, true , ... } end
-local match_one_of = (is_one_of ) / function(...) map[#map+1] = { 21, true , ... } end
-local dont_match = (is_none ) / function(...) map[#map+1] = { 20, false, ... } end
-local dont_match_one_of = (is_none_of ) / function(...) map[#map+1] = { 21, false, ... } end
-
-local match_and_eq = (is_one * is_eq ) / function(...) map[#map+1] = { 22, true , ... } end
-local match_and_ne = (is_one * is_ne ) / function(...) map[#map+1] = { 23, true , ... } end
-local dont_match_and_eq = (is_none * is_eq ) / function(...) map[#map+1] = { 22, false, ... } end
-local dont_match_and_ne = (is_none * is_ne ) / function(...) map[#map+1] = { 23, false, ... } end
-
-local match_one_of_and_eq = (is_one_of * is_eq ) / function(...) map[#map+1] = { 24, true , ... } end
-local match_one_of_and_ne = (is_one_of * is_ne ) / function(...) map[#map+1] = { 25, true , ... } end
-local dont_match_one_of_and_eq = (is_none_of * is_eq ) / function(...) map[#map+1] = { 24, false, ... } end
-local dont_match_one_of_and_ne = (is_none_of * is_ne ) / function(...) map[#map+1] = { 25, false, ... } end
-
-local has_attribute = (is_one * is_attribute) / function(...) map[#map+1] = { 27, true , ... } end
-local has_value = (is_one * is_value ) / function(...) map[#map+1] = { 28, true , ... } end
-local dont_has_attribute = (is_none * is_attribute) / function(...) map[#map+1] = { 27, false, ... } end
-local dont_has_value = (is_none * is_value ) / function(...) map[#map+1] = { 28, false, ... } end
-local position = (is_one * is_number ) / function(...) map[#map+1] = { 30, true, ... } end
-local dont_position = (is_none * is_number ) / function(...) map[#map+1] = { 30, false, ... } end
-
-local expression = (is_one * is_expression)/ function(...) map[#map+1] = { 31, true, ... } end
-local dont_expression = (is_none * is_expression)/ function(...) map[#map+1] = { 31, false, ... } end
-
-local self_expression = ( is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end
- map[#map+1] = { 31, true, "*", "*", ... } end
-local dont_self_expression = (exclam * is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end
- map[#map+1] = { 31, false, "*", "*", ... } end
-
-local instruction = (instructiontag * text ) / function(...) map[#map+1] = { 40, ... } end
-local nothing = (empty ) / function( ) map[#map+1] = { 15 } end -- 15 ?
-local crap = (1-slash)^1
-
--- a few ugly goodies:
-
-local docroottag = P('^^') / function( ) map[#map+1] = { 12 } end
-local subroottag = P('^') / function( ) map[#map+1] = { 13 } end
-local roottag = P('root::') / function( ) map[#map+1] = { 12 } end
-local parenttag = P('parent::') / function( ) map[#map+1] = { 11 } end
-local childtag = P('child::')
-local selftag = P('self::')
-
--- there will be more and order will be optimized
-
-local selector = (
- instruction +
--- many + any + -- brrr, not here !
- parent + stay +
- dont_position + position +
- dont_match_one_of_and_eq + dont_match_one_of_and_ne +
- match_one_of_and_eq + match_one_of_and_ne +
- dont_match_and_eq + dont_match_and_ne +
- match_and_eq + match_and_ne +
- dont_expression + expression +
- dont_self_expression + self_expression +
- has_attribute + has_value +
- dont_match_one_of + match_one_of +
- dont_match + match +
- many + any +
- crap + empty
-)
-
-local grammar = P { "startup",
- startup = (initial + documentroot + subtreeroot + roottag + docroottag + subroottag)^0 * V("followup"),
- followup = ((slash + parenttag + childtag + selftag)^0 * selector)^1,
-}
+ return collected
+end
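--[[ldx--
<p>Editor's note, not part of the patch: these axis functions assume the element
layout produced by the companion table parser: <t>tg</t> (tag), <t>ns</t>/<t>rn</t>
(namespace), <t>at</t> (attributes), <t>dt</t> (children) and <t>__p__</t> (parent);
while collecting they refresh <t>ni</t> (position in the parent), <t>ei</t> (element
index) and <t>en</t> (number of child elements). A hand-built sketch:</p>

<typing>
local c = { tg = "c", ns = "", at = { n = "1" }, dt = { "some text" } }
local b = { tg = "b", ns = "", at = { },         dt = { c } }
c.__p__ = b

local kids = apply_axis['child']({ b }) -- { c }, with c.ni == 1 and c.ei == 1
</typing>
--ldx]]--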
-local function compose(str)
- if not str or str == "" then
- -- wildcard
- return true
- elseif str == '/' then
- -- root
- return false
- else
- map = { }
- grammar:match(str)
- if #map == 0 then
- return true
- else
- local m = map[1][1]
- if #map == 1 then
- if m == 14 or m == 15 then
- -- wildcard
- return true
- elseif m == 12 then
- -- root
- return false
- end
- elseif #map == 2 and m == 12 and map[2][1] == 20 then
- -- return { { 29, map[2][2], map[2][3], map[2][4], map[2][5] } }
- map[2][1] = 29
- return { map[2] }
- end
- if m ~= 11 and m ~= 12 and m ~= 13 and m ~= 14 and m ~= 15 and m ~= 16 then
- insert(map, 1, { 16 })
+local function collect(list,collected)
+ local dt = list.dt
+ if dt then
+ local en = 0
+ for k=1,#dt do
+ local dk = dt[k]
+ if dk.tg then
+ collected[#collected+1] = dk
+ dk.ni = k -- refresh
+ en = en + 1
+ dk.ei = en
+ collect(dk,collected)
end
- -- print(gsub(table.serialize(map),"[ \n]+"," "))
- return map
end
+ list.en = en
end
end
+apply_axis['descendant'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ collect(list[l],collected)
+ end
+ return collected
+end
-local cache = { }
-
-function xml.lpath(pattern,trace)
- lpathcalls = lpathcalls + 1
- if type(pattern) == "string" then
- local result = cache[pattern]
- if result == nil then -- can be false which is valid -)
- result = compose(pattern)
- cache[pattern] = result
- lpathcached = lpathcached + 1
+local function collect(list,collected)
+ local dt = list.dt
+ if dt then
+ local en = 0
+ for k=1,#dt do
+ local dk = dt[k]
+ if dk.tg then
+ collected[#collected+1] = dk
+ dk.ni = k -- refresh
+ en = en + 1
+ dk.ei = en
+ collect(dk,collected)
+ end
end
- if trace or trace_lpath then
- xml.lshow(result)
+ list.en = en
+ end
+end
+apply_axis['descendant-or-self'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ if ll.special ~= true then -- catch double root
+ collected[#collected+1] = ll
end
- return result
- else
- return pattern
+ collect(ll,collected)
end
+ return collected
end
-function xml.cached_patterns()
- return cache
+apply_axis['ancestor'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ while ll do
+ ll = ll.__p__
+ if ll then
+ collected[#collected+1] = ll
+ end
+ end
+ end
+ return collected
end
--- we run out of locals (limited to 200)
---
--- local fallbackreport = (texio and texio.write) or io.write
-
-function xml.lshow(pattern,report)
--- report = report or fallbackreport
- report = report or (texio and texio.write) or io.write
- local lp = xml.lpath(pattern)
- if lp == false then
- report(" -: root\n")
- elseif lp == true then
- report(" -: wildcard\n")
- else
- if type(pattern) == "string" then
- report(format("pattern: %s\n",pattern))
- end
- for k=1,#lp do
- local v = lp[k]
- if #v > 1 then
- local t = { }
- for i=2,#v do
- local vv = v[i]
- if type(vv) == "string" then
- t[#t+1] = (vv ~= "" and vv) or "#"
- elseif type(vv) == "boolean" then
- t[#t+1] = (vv and "==") or "<>"
- end
- end
- report(format("%2i: %s %s -> %s\n", k,v[1],actions[v[1]],concat(t," ")))
- else
- report(format("%2i: %s %s\n", k,v[1],actions[v[1]]))
+apply_axis['ancestor-or-self'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ collected[#collected+1] = ll
+ while ll do
+ ll = ll.__p__
+ if ll then
+ collected[#collected+1] = ll
end
end
end
+ return collected
end
-function xml.xshow(e,...) -- also handy when report is given, use () to isolate first e
- local t = { ... }
--- local report = (type(t[#t]) == "function" and t[#t]) or fallbackreport
- local report = (type(t[#t]) == "function" and t[#t]) or (texio and texio.write) or io.write
- if e == nil then
- report("<!-- no element -->\n")
- elseif type(e) ~= "table" then
- report(tostring(e))
- elseif e.tg then
- report(tostring(e) .. "\n")
- else
- for i=1,#e do
- report(tostring(e[i]) .. "\n")
+apply_axis['parent'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local pl = list[l].__p__
+ if pl then
+ collected[#collected+1] = pl
end
end
+ return collected
end
---[[ldx--
-<p>An <l n='lpath'/> is converted to a table with instructions for traversing the
-tree. However, simple cases are signaled by booleans. Because we don't know in
-advance what we want to do with the found element the handle gets three arguments:</p>
-
-<lines>
-<t>r</t> : the root element of the data table
-<t>d</t> : the data table of the result
-<t>t</t> : the index in the data table of the result
-</lines>
-
-<p> Access to the root and data table makes it possible to construct insert and delete
-functions.</p>
---ldx]]--
-
-local functions = xml.functions
-local expressions = xml.expressions
-
-expressions.contains = string.find
-expressions.find = string.find
-expressions.upper = string.upper
-expressions.lower = string.lower
-expressions.number = tonumber
-expressions.boolean = toboolean
-
-expressions.oneof = function(s,...) -- slow
- local t = {...} for i=1,#t do if s == t[i] then return true end end return false
+apply_axis['attribute'] = function(list)
+ return { }
end
-expressions.error = function(str)
- xml.error_handler("unknown function in lpath expression",str or "?")
- return false
+apply_axis['namespace'] = function(list)
+ return { }
end
-functions.text = function(root,k,n) -- unchecked, maybe one deeper
- local t = type(t)
- if t == "string" then
- return t
- else -- todo n
- local rdt = root.dt
- return (rdt and rdt[k]) or root[k] or ""
- end
+apply_axis['following'] = function(list) -- incomplete
+--~ local collected = { }
+--~ for l=1,#list do
+--~ local ll = list[l]
+--~ local p = ll.__p__
+--~ local d = p.dt
+--~ for i=ll.ni+1,#d do
+--~ local di = d[i]
+--~ if type(di) == "table" then
+--~ collected[#collected+1] = di
+--~ break
+--~ end
+--~ end
+--~ end
+--~ return collected
+ return { }
+end
+
+apply_axis['preceding'] = function(list) -- incomplete
+--~ local collected = { }
+--~ for l=1,#list do
+--~ local ll = list[l]
+--~ local p = ll.__p__
+--~ local d = p.dt
+--~ for i=ll.ni-1,1,-1 do
+--~ local di = d[i]
+--~ if type(di) == "table" then
+--~ collected[#collected+1] = di
+--~ break
+--~ end
+--~ end
+--~ end
+--~ return collected
+ return { }
end
-functions.name = function(d,k,n) -- ns + tg
- local found = false
- n = n or 0
- if not k then
- -- not found
- elseif n == 0 then
- local dk = d[k]
- found = dk and (type(dk) == "table") and dk
- elseif n < 0 then
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
- else
- for i=k+1,#d,1 do
+apply_axis['following-sibling'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ local p = ll.__p__
+ local d = p.dt
+ for i=ll.ni+1,#d do
local di = d[i]
if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
+ collected[#collected+1] = di
end
end
end
- if found then
- local ns, tg = found.rn or found.ns or "", found.tg
- if ns ~= "" then
- return ns .. ":" .. tg
- else
- return tg
- end
- else
- return ""
- end
+ return collected
end
-functions.tag = function(d,k,n) -- only tg
- local found = false
- n = n or 0
- if not k then
- -- not found
- elseif n == 0 then
- local dk = d[k]
- found = dk and (type(dk) == "table") and dk
- elseif n < 0 then
- for i=k-1,1,-1 do
+apply_axis['preceding-sibling'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ local p = ll.__p__
+ local d = p.dt
+ for i=1,ll.ni-1 do
local di = d[i]
if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
+ collected[#collected+1] = di
end
end
- else
- for i=k+1,#d,1 do
+ end
+ return collected
+end
+
+apply_axis['reverse-sibling'] = function(list) -- reverse preceding
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ local p = ll.__p__
+ local d = p.dt
+ for i=ll.ni-1,1,-1 do
local di = d[i]
if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
+ collected[#collected+1] = di
end
end
end
- return (found and found.tg) or ""
+ return collected
end
-expressions.text = functions.text
-expressions.name = functions.name
-expressions.tag = functions.tag
+apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self']
+apply_axis['auto-descendant'] = apply_axis['descendant']
+apply_axis['auto-child'] = apply_axis['child']
+apply_axis['auto-self'] = apply_axis['self']
+apply_axis['initial-child'] = apply_axis['child']
-local function traverse(root,pattern,handle,reverse,index,parent,wildcard) -- multiple only for tags, not for namespaces
- if not root then -- error
- return false
- elseif pattern == false then -- root
- handle(root,root.dt,root.ri)
- return false
- elseif pattern == true then -- wildcard
- local rootdt = root.dt
- if rootdt then
- local start, stop, step = 1, #rootdt, 1
- if reverse then
- start, stop, step = stop, start, -1
- end
- for k=start,stop,step do
- if handle(root,rootdt,root.ri or k) then return false end
- if not traverse(rootdt[k],true,handle,reverse) then return false end
- end
- end
- return false
- elseif root.dt then
- index = index or 1
- local action = pattern[index]
- local command = action[1]
- if command == 29 then -- fast case /oeps
- local rootdt = root.dt
- for k=1,#rootdt do
- local e = rootdt[k]
- local tg = e.tg
- if e.tg then
- local ns = e.rn or e.ns
- local ns_a, tg_a = action[3], action[4]
- local matched = (ns_a == "*" or ns == ns_a) and (tg_a == "*" or tg == tg_a)
- if not action[2] then matched = not matched end
- if matched then
- if handle(root,rootdt,k) then return false end
- end
- end
- end
- elseif command == 11 then -- parent
- local ep = root.__p__ or parent
- if index < #pattern then
- if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end
- elseif handle(root,rootdt,k) then
- return false
+local function apply_nodes(list,directive,nodes)
+ -- todo: nodes[1] etc ... negated node name in set ... when needed
+ -- ... currently ignored
+ local maxn = #nodes
+ if maxn == 3 then --optimized loop
+ local nns, ntg = nodes[2], nodes[3]
+ if not nns and not ntg then -- wildcard
+ if directive then
+ return list
+ else
+ return { }
end
else
- if (command == 16 or command == 12) and index == 1 then -- initial
- -- wildcard = true
- wildcard = command == 16 -- ok?
- index = index + 1
- action = pattern[index]
- command = action and action[1] or 0 -- something is wrong
- end
- if command == 11 then -- parent
- local ep = root.__p__ or parent
- if index < #pattern then
- if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end
- elseif handle(root,rootdt,k) then
- return false
- end
- else
- local rootdt = root.dt
- local start, stop, step, n, dn = 1, #rootdt, 1, 0, 1
- if command == 30 then
- if action[5] < 0 then
- start, stop, step = stop, start, -1
- dn = -1
- end
- elseif reverse and index == #pattern then
- start, stop, step = stop, start, -1
- end
- local idx = 0
- local hsh = { } -- this will slooow down the lot
- for k=start,stop,step do -- we used to have functions for all but a case is faster
- local e = rootdt[k]
- local ns, tg = e.rn or e.ns, e.tg
- if tg then
- -- we can optimize this for simple searches, but it probably does not pay off
- hsh[tg] = (hsh[tg] or 0) + 1
- idx = idx + 1
- if command == 30 then
- local ns_a, tg_a = action[3], action[4]
- if tg == tg_a then
- matched = ns_a == "*" or ns == ns_a
- elseif tg_a == '*' then
- matched, multiple = ns_a == "*" or ns == ns_a, true
- else
- matched = false
- end
- if not action[2] then matched = not matched end
- if matched then
- n = n + dn
- if n == action[5] then
- if index == #pattern then
- if handle(root,rootdt,root.ri or k) then return false end
- else
- if not traverse(e,pattern,handle,reverse,index+1,root) then return false end
- end
- break
- end
- elseif wildcard then
- if not traverse(e,pattern,handle,reverse,index,root,true) then return false end
+ local collected, m, p = { }, 0, nil
+ if not nns then -- only check tag
+ for l=1,#list do
+ local ll = list[l]
+ local ltg = ll.tg
+ if ltg then
+ if directive then
+ if ntg == ltg then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
end
- else
- local matched, multiple = false, false
- if command == 20 then -- match
- local ns_a, tg_a = action[3], action[4]
- if tg == tg_a then
- matched = ns_a == "*" or ns == ns_a
- elseif tg_a == '*' then
- matched, multiple = ns_a == "*" or ns == ns_a, true
- else
- matched = false
- end
- if not action[2] then matched = not matched end
- elseif command == 21 then -- match one of
- multiple = true
- for i=3,#action,2 do
- local ns_a, tg_a = action[i], action[i+1]
- if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then
- matched = true
- break
- end
- end
- if not action[2] then matched = not matched end
- elseif command == 22 then -- eq
- local ns_a, tg_a = action[3], action[4]
- if tg == tg_a then
- matched = ns_a == "*" or ns == ns_a
- elseif tg_a == '*' then
- matched, multiple = ns_a == "*" or ns == ns_a, true
- else
- matched = false
- end
- matched = matched and e.at[action[6]] == action[7]
- elseif command == 23 then -- ne
- local ns_a, tg_a = action[3], action[4]
- if tg == tg_a then
- matched = ns_a == "*" or ns == ns_a
- elseif tg_a == '*' then
- matched, multiple = ns_a == "*" or ns == ns_a, true
- else
- matched = false
- end
- if not action[2] then matched = not matched end
- matched = mached and e.at[action[6]] ~= action[7]
- elseif command == 24 then -- one of eq
- multiple = true
- for i=3,#action-2,2 do
- local ns_a, tg_a = action[i], action[i+1]
- if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then
- matched = true
- break
- end
- end
- if not action[2] then matched = not matched end
- matched = matched and e.at[action[#action-1]] == action[#action]
- elseif command == 25 then -- one of ne
- multiple = true
- for i=3,#action-2,2 do
- local ns_a, tg_a = action[i], action[i+1]
- if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then
- matched = true
- break
- end
- end
- if not action[2] then matched = not matched end
- matched = matched and e.at[action[#action-1]] ~= action[#action]
- elseif command == 27 then -- has attribute
- local ns_a, tg_a = action[3], action[4]
- if tg == tg_a then
- matched = ns_a == "*" or ns == ns_a
- elseif tg_a == '*' then
- matched, multiple = ns_a == "*" or ns == ns_a, true
- else
- matched = false
- end
- if not action[2] then matched = not matched end
- matched = matched and e.at[action[5]]
- elseif command == 28 then -- has value
- local edt, ns_a, tg_a = e.dt, action[3], action[4]
- if tg == tg_a then
- matched = ns_a == "*" or ns == ns_a
- elseif tg_a == '*' then
- matched, multiple = ns_a == "*" or ns == ns_a, true
- else
- matched = false
- end
- if not action[2] then matched = not matched end
- matched = matched and edt and edt[1] == action[5]
- elseif command == 31 then
- local edt, ns_a, tg_a = e.dt, action[3], action[4]
- if tg == tg_a then
- matched = ns_a == "*" or ns == ns_a
- elseif tg_a == '*' then
- matched, multiple = ns_a == "*" or ns == ns_a, true
- else
- matched = false
- end
- if not action[2] then matched = not matched end
- if matched then
- matched = action[6](expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1)
- end
- end
- if matched then -- combine tg test and at test
- if index == #pattern then
- if handle(root,rootdt,root.ri or k) then return false end
- if wildcard then
- if multiple then
- if not traverse(e,pattern,handle,reverse,index,root,true) then return false end
- else
- -- maybe or multiple; anyhow, check on (section|title) vs just section and title in example in lxml
- if not traverse(e,pattern,handle,reverse,index,root) then return false end
- end
- end
- else
- if not traverse(e,pattern,handle,reverse,index+1,root) then return false end
- end
- elseif command == 14 then -- any
- if index == #pattern then
- if handle(root,rootdt,root.ri or k) then return false end
- else
- if not traverse(e,pattern,handle,reverse,index+1,root) then return false end
- end
- elseif command == 15 then -- many
- if index == #pattern then
- if handle(root,rootdt,root.ri or k) then return false end
- else
- if not traverse(e,pattern,handle,reverse,index+1,root,true) then return false end
- end
- -- not here : 11
- elseif command == 11 then -- parent
- local ep = e.__p__ or parent
- if index < #pattern then
- if not traverse(ep,pattern,handle,reverse,root,index+1) then return false end
- elseif handle(root,rootdt,k) then
- return false
- end
- elseif command == 40 and e.special and tg == "@pi@" then -- pi
- local pi = action[2]
- if pi ~= "" then
- local pt = e.dt[1]
- if pt and pt:find(pi) then
- if handle(root,rootdt,k) then
- return false
- end
- end
- elseif handle(root,rootdt,k) then
- return false
- end
- elseif wildcard then
- if not traverse(e,pattern,handle,reverse,index,root,true) then return false end
+ elseif ntg ~= ltg then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
+ end
+ end
+ end
+ elseif not ntg then -- only check namespace
+ for l=1,#list do
+ local ll = list[l]
+ local lns = ll.rn or ll.ns
+ if lns then
+ if directive then
+ if lns == nns then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
end
+ elseif lns ~= nns then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
end
- else
- -- not here : 11
- if command == 11 then -- parent
- local ep = e.__p__ or parent
- if index < #pattern then
- if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end
- elseif handle(root,rootdt,k) then
- return false
+ end
+ end
+ else -- check both
+ for l=1,#list do
+ local ll = list[l]
+ local ltg = ll.tg
+ if ltg then
+ local lns = ll.rn or ll.ns
+ local ok = ltg == ntg and lns == nns
+ if directive then
+ if ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
end
- break -- else loop
+ elseif not ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
end
end
end
end
+ return collected
end
+ else
+ local collected, m, p = { }, 0, nil
+ for l=1,#list do
+ local ll = list[l]
+ local ltg = ll.tg
+ if ltg then
+ local lns = ll.rn or ll.ns
+ local ok = false
+ for n=1,maxn,3 do
+ local nns, ntg = nodes[n+1], nodes[n+2]
+ ok = (not ntg or ltg == ntg) and (not nns or lns == nns)
+ if ok then
+ break
+ end
+ end
+ if directive then
+ if ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
+ end
+ elseif not ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ collected[#collected+1], ll.mi = ll, m
+ end
+ end
+ end
+ return collected
end
- return true
end
-xml.traverse = traverse
+local quit_expression = false
---[[ldx--
-<p>Next come all kind of locators and manipulators. The most generic function here
-is <t>xml.filter(root,pattern)</t>. All registered functions in the filters namespace
-can be part of a search path, as in:</p>
+local function apply_expression(list,expression,order)
+ local collected = { }
+ quit_expression = false
+ for l=1,#list do
+ local ll = list[l]
+        if expression(list,ll,l,order) then -- nasty, order is only valid when n=1
+ collected[#collected+1] = ll
+ end
+ if quit_expression then
+ break
+ end
+ end
+ return collected
+end
+
+local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
+
+local spaces = S(" \n\r\t\f")^0
+local lp_space = S(" \n\r\t\f")
+local lp_any = P(1)
+local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
+local lp_doequal = P("=") / "=="
+local lp_or = P("|") / " or "
+local lp_and = P("&") / " and "
+
+local lp_builtin = P (
+ P("firstindex") / "1" +
+ P("lastindex") / "(#ll.__p__.dt or 1)" +
+ P("firstelement") / "1" +
+ P("lastelement") / "(ll.__p__.en or 1)" +
+ P("first") / "1" +
+ P("last") / "#list" +
+ P("rootposition") / "order" +
+ P("position") / "l" + -- is element in finalizer
+ P("order") / "order" +
+ P("element") / "(ll.ei or 1)" +
+ P("index") / "(ll.ni or 1)" +
+ P("match") / "(ll.mi or 1)" +
+ P("text") / "(ll.dt[1] or '')" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("ns") / "ll.ns"
+ ) * ((spaces * P("(") * spaces * P(")"))/"")
+
+local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * R("az","AZ","--","__")^1 * Cc("'])")
+local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
+local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
+local lp_fastpos = lp_fastpos_n + lp_fastpos_p
+local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
+
+local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
+ return t .. "("
+end
-<typing>
-local r, d, k = xml.filter(root,"/a/b/c/position(4)"
-</typing>
---ldx]]--
+local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
+ if expressions[t] then
+ return "expr." .. t .. "("
+ else
+ return "expr.error("
+ end
+end
-local traverse, lpath, convert = xml.traverse, xml.lpath, xml.convert
+local lparent = lpeg.P("(")
+local rparent = lpeg.P(")")
+local noparent = 1 - (lparent+rparent)
+local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
+local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
-xml.filters = { }
+local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
+local lp_number = S("+-") * R("09")^1
+local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
+local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
-function xml.filters.default(root,pattern)
- local rt, dt, dk
- traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end)
- return dt and dt[dk], rt, dt, dk
-end
+local cleaner
-function xml.filters.attributes(root,pattern,arguments)
- local rt, dt, dk
- traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end)
- local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at)
- if ekat then
- if arguments then
- return ekat[arguments] or "", rt, dt, dk
+local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s)
+ if expressions[t] then
+ s = s and s ~= "" and lpegmatch(cleaner,s)
+ if s and s ~= "" then
+ return "expr." .. t .. "(ll," .. s ..")"
else
- return ekat, rt, dt, dk
+ return "expr." .. t .. "(ll)"
end
else
- return { }, rt, dt, dk
+ return "expr.error(" .. t .. ")"
end
end
-function xml.filters.reverse(root,pattern)
- local rt, dt, dk
- traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse')
- return dt and dt[dk], rt, dt, dk
-end
+local content =
+ lp_builtin +
+ lp_attribute +
+ lp_special +
+ lp_noequal + lp_doequal +
+ lp_or + lp_and +
+ lp_reserved +
+ lp_lua_function + lp_function +
+ lp_content + -- too fragile
+ lp_child +
+ lp_any
+
+local converter = Cs (
+ lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0
+)
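--[[ldx--
<p>Editor's sketch, not part of the patch: the converter rewrites the text between
square brackets into a chunk of <l n='lua'/> code that is evaluated against the
current element <t>ll</t>; roughly:</p>

<typing>
lpegmatch(converter,"@type='mkiv' and position()!=1")
-- (ll.at and ll.at['type'])=='mkiv' and l~=1
</typing>
--ldx]]--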
-function xml.filters.count(root,pattern,everything)
- local n = 0
- traverse(root, lpath(pattern), function(r,d,t)
- if everything or type(d[t]) == "table" then
- n = n + 1
- end
- end)
- return n
-end
+cleaner = Cs ( (
+--~ lp_fastpos +
+ lp_reserved +
+ lp_number +
+ lp_string +
+1 )^1 )
-function xml.filters.elements(root, pattern) -- == all
- local t = { }
- traverse(root, lpath(pattern), function(r,d,k)
- local e = d[k]
- if e then
- t[#t+1] = e
- end
- end)
- return t
-end
-function xml.filters.texts(root, pattern)
- local t = { }
- traverse(root, lpath(pattern), function(r,d,k)
- local e = d[k]
- if e and e.dt then
- t[#t+1] = e.dt
- end
- end)
- return t
-end
+--~ expr
-function xml.filters.first(root,pattern)
- local rt, dt, dk
- traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end)
- return dt and dt[dk], rt, dt, dk
-end
+local template_e = [[
+ local expr = xml.expressions
+ return function(list,ll,l,order)
+ return %s
+ end
+]]
-function xml.filters.last(root,pattern)
- local rt, dt, dk
- traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse')
- return dt and dt[dk], rt, dt, dk
-end
+local template_f_y = [[
+ local finalizer = xml.finalizers['%s']['%s']
+ return function(collection)
+ return finalizer(collection,%s)
+ end
+]]
-function xml.filters.index(root,pattern,arguments)
- local rt, dt, dk, reverse, i = nil, nil, nil, false, tonumber(arguments or '1') or 1
- if i and i ~= 0 then
- if i < 0 then
- reverse, i = true, -i
- end
- traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk, i = r, d, k, i-1 return i == 0 end, reverse)
- if i == 0 then
- return dt and dt[dk], rt, dt, dk
- end
+local template_f_n = [[
+ return xml.finalizers['%s']['%s']
+]]
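--[[ldx--
<p>Editor's sketch, not part of the patch: for a step like <t>[@n='2']</t> the
<t>register_expression</t> function below feeds the converted code into
<t>template_e</t>, which compiles to roughly this closure:</p>

<typing>
local expr = xml.expressions
return function(list,ll,l,order)
    return (ll.at and ll.at['n'])=='2'
end
</typing>
--ldx]]--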
+
+--
+
+local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
+local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
+local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
+local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
+local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
+local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
+local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
+local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
+local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
+local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
+local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
+local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
+local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
+local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
+local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
+
+local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
+local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
+local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
+local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
+
+local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
+
+local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
+
+local skip = { }
+
+local function errorrunner_e(str,cnv)
+ if not skip[str] then
+ logs.report("lpath","error in expression: %s => %s",str,cnv)
+ skip[str] = cnv or str
end
- return nil, nil, nil, nil
+ return false
+end
+local function errorrunner_f(str,arg)
+ logs.report("lpath","error in finalizer: %s(%s)",str,arg or "")
+ return false
end
-function xml.filters.attribute(root,pattern,arguments)
- local rt, dt, dk
- traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end)
- local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at)
- return (ekat and (ekat[arguments] or ekat[gsub(arguments,"^([\"\'])(.*)%1$","%2")])) or ""
+local function register_nodes(nodetest,nodes)
+ return { kind = "nodes", nodetest = nodetest, nodes = nodes }
end
-function xml.filters.text(root,pattern,arguments) -- ?? why index, tostring slow
- local dtk, rt, dt, dk = xml.filters.index(root,pattern,arguments)
- if dtk then -- n
- local dtkdt = dtk.dt
- if not dtkdt then
- return "", rt, dt, dk
- elseif #dtkdt == 1 and type(dtkdt[1]) == "string" then
- return dtkdt[1], rt, dt, dk
- else
- return xml.tostring(dtkdt), rt, dt, dk
- end
+local function register_expression(expression)
+ local converted = lpegmatch(converter,expression)
+ local runner = loadstring(format(template_e,converted))
+ runner = (runner and runner()) or function() errorrunner_e(expression,converted) end
+ return { kind = "expression", expression = expression, converted = converted, evaluator = runner }
+end
+
+local function register_finalizer(protocol,name,arguments)
+ local runner
+ if arguments and arguments ~= "" then
+ runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
else
- return "", rt, dt, dk
+ runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name))
end
+ runner = (runner and runner()) or function() errorrunner_f(name,arguments) end
+ return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner }
end
-function xml.filters.tag(root,pattern,n)
- local tag = ""
- traverse(root, lpath(pattern), function(r,d,k)
- tag = xml.functions.tag(d,k,n and tonumber(n))
- return true
- end)
- return tag
-end
+local expression = P { "ex",
+ ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]",
+ sq = "'" * (1 - S("'"))^0 * "'",
+ dq = '"' * (1 - S('"'))^0 * '"',
+}
-function xml.filters.name(root,pattern,n)
- local tag = ""
- traverse(root, lpath(pattern), function(r,d,k)
- tag = xml.functions.name(d,k,n and tonumber(n))
- return true
- end)
- return tag
+local arguments = P { "ar",
+ ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")",
+ nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end,
+ sq = P("'") * (1 - P("'"))^0 * P("'"),
+ dq = P('"') * (1 - P('"'))^0 * P('"'),
+}
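--[[ldx--
<p>Editor's sketch, not part of the patch: these two helper grammars only split the
input; <t>expression</t> captures the text between square brackets while
<t>arguments</t> captures the text between parentheses, quoting bare words on the
way so that they end up as strings in the generated finalizer call:</p>

<typing>
lpegmatch(expression,"[@n='2']")       -- @n='2'
lpegmatch(arguments,"(1,'two',three)") -- "1",'two',"three"
</typing>
--ldx]]--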
+
+-- todo: better arg parser
+
+local function register_error(str)
+ return { kind = "error", error = format("unparsed: %s",str) }
end
---[[ldx--
-<p>For splitting the filter function from the path specification, we can
-use string matching or lpeg matching. Here the difference in speed is
-negligible but the lpeg variant is more robust.</p>
---ldx]]--
+-- there is a difference between * and /*/ and so we need to catch a few special cases
+
+local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed
+local special_2 = P("/") * Cc(register_auto_self)
+local special_3 = P("") * Cc(register_auto_self)
+
+local parser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+
+ patterns = spaces * V("protocol") * spaces * (
+ ( V("special") * spaces * P(-1) ) +
+ ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 )
+ ),
+
+ protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"),
+
+ -- the / is needed for // as descendant or self is somewhat special
+ -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
+ step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
+
+ axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") +
+ V("descendant_or_self") + V("following_sibling") + V("following") +
+ V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") +
+ #(1-P(-1)) * Cc(register_auto_child),
+
+ special = special_1 + special_2 + special_3,
--- not faster but hipper ... although ... i can't get rid of the trailing / in the path
+ initial = (P("/") * spaces * Cc(register_initial_child))^-1,
-local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc
+ error = (P(1)^1) / register_error,
-local slash = P('/')
-local name = (R("az","AZ","--","__"))^1
-local path = C(((1-slash)^0 * slash)^1)
-local argument = P { "(" * C(((1 - S("()")) + V(1))^0) * ")" }
-local action = Cc(1) * path * C(name) * argument
-local attribute = Cc(2) * path * P('@') * C(name)
-local direct = Cc(3) * Cc("../*") * slash^0 * C(name) * argument
+ shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"),
-local parser = direct + action + attribute
+ shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
-local filters = xml.filters
-local attribute_filter = xml.filters.attributes
-local default_filter = xml.filters.default
+ s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
+ -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
+ s_descendant = P("**") * Cc(register_descendant),
+ s_child = P("*") * #(1-P(":")) * Cc(register_child ),
+-- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_parent = P("..") * Cc(register_parent ),
+ s_self = P("." ) * Cc(register_self ),
+ s_root = P("^^") * Cc(register_root ),
+ s_ancestor = P("^") * Cc(register_ancestor ),
--- todo: also hash, could be gc'd
+ descendant = P("descendant::") * Cc(register_descendant ),
+ child = P("child::") * Cc(register_child ),
+ parent = P("parent::") * Cc(register_parent ),
+ self = P("self::") * Cc(register_self ),
+ root = P('root::') * Cc(register_root ),
+ ancestor = P('ancestor::') * Cc(register_ancestor ),
+ descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ),
+ ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ),
+ -- attribute = P('attribute::') * Cc(register_attribute ),
+ -- namespace = P('namespace::') * Cc(register_namespace ),
+ following = P('following::') * Cc(register_following ),
+ following_sibling = P('following-sibling::') * Cc(register_following_sibling ),
+ preceding = P('preceding::') * Cc(register_preceding ),
+ preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ),
+ reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ),
+
+ nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes,
+
+ expressions = expression / register_expression,
+
+ letters = R("az")^1,
+ name = (1-lpeg.S("/[]()|:*!"))^1,
+ negate = P("!") * Cc(false),
+
+ nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
+ nodetest = V("negate") + Cc(true),
+ nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
+
+ finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
+
+}
+
+local cache = { }
-function xml.filter(root,pattern)
- local kind, a, b, c = parser:match(pattern)
- if kind == 1 or kind == 3 then
- return (filters[b] or default_filter)(root,a,c)
- elseif kind == 2 then
- return attribute_filter(root,a,b)
+local function nodesettostring(set,nodetest)
+ local t = { }
+ for i=1,#set,3 do
+ local directive, ns, tg = set[i], set[i+1], set[i+2]
+ if not ns or ns == "" then ns = "*" end
+ if not tg or tg == "" then tg = "*" end
+ tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[#t+1] = (directive and tg) or format("not(%s)",tg)
+ end
+ if nodetest == false then
+ return format("not(%s)",concat(t,"|"))
else
- return default_filter(root,pattern)
+ return concat(t,"|")
end
end
---~ slightly faster, but first we need a proper test file
---~
---~ local hash = { }
---~
---~ function xml.filter(root,pattern)
---~ local h = hash[pattern]
---~ if not h then
---~ local kind, a, b, c = parser:match(pattern)
---~ if kind == 1 then
---~ h = { kind, filters[b] or default_filter, a, b, c }
---~ elseif kind == 2 then
---~ h = { kind, attribute_filter, a, b, c }
---~ else
---~ h = { kind, default_filter, a, b, c }
---~ end
---~ hash[pattern] = h
---~ end
---~ local kind = h[1]
---~ if kind == 1 then
---~ return h[2](root,h[2],h[4])
---~ elseif kind == 2 then
---~ return h[2](root,h[2],h[3])
---~ else
---~ return h[2](root,pattern)
---~ end
---~ end
+local function tagstostring(list)
+ if #list == 0 then
+ return "no elements"
+ else
+ local t = { }
+ for i=1, #list do
+ local li = list[i]
+ local ns, tg = li.ns, li.tg
+ if not ns or ns == "" then ns = "*" end
+ if not tg or tg == "" then tg = "*" end
+ t[#t+1] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
+ end
+ return concat(t," ")
+ end
+end
---[[ldx--
-<p>The following functions collect elements and texts.</p>
---ldx]]--
+xml.nodesettostring = nodesettostring
--- still somewhat bugged
+local parse_pattern -- we have a harmless kind of circular reference
-function xml.collect_elements(root, pattern, ignorespaces)
- local rr, dd = { }, { }
- traverse(root, lpath(pattern), function(r,d,k)
- local dk = d and d[k]
- if dk then
- if ignorespaces and type(dk) == "string" and dk:find("[^%S]") then
- -- ignore
- else
- local n = #rr+1
- rr[n], dd[n] = r, dk
- end
- end
- end)
- return dd, rr
+local function lshow(parsed)
+ if type(parsed) == "string" then
+ parsed = parse_pattern(parsed)
+ end
+ local s = table.serialize_functions -- ugly
+ table.serialize_functions = false -- ugly
+ logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
+ table.serialize_functions = s -- ugly
end
-function xml.collect_texts(root, pattern, flatten)
- local t = { } -- no r collector
- traverse(root, lpath(pattern), function(r,d,k)
- if d then
- local ek = d[k]
- local tx = ek and ek.dt
- if flatten then
- if tx then
- t[#t+1] = xml.tostring(tx) or ""
+xml.lshow = lshow
+
+local function add_comment(p,str)
+ local pc = p.comment
+ if not pc then
+ p.comment = { str }
+ else
+ pc[#pc+1] = str
+ end
+end
+
+parse_pattern = function (pattern) -- the gain of caching is rather minimal
+ lpathcalls = lpathcalls + 1
+ if type(pattern) == "table" then
+ return pattern
+ else
+ local parsed = cache[pattern]
+ if parsed then
+ lpathcached = lpathcached + 1
+ else
+ parsed = lpegmatch(parser,pattern)
+ if parsed then
+ parsed.pattern = pattern
+ local np = #parsed
+ if np == 0 then
+ parsed = { pattern = pattern, register_self, state = "parsing error" }
+ logs.report("lpath","parsing error in '%s'",pattern)
+ lshow(parsed)
else
- t[#t+1] = ""
+ -- we could have done this with a more complex parser but this
+ -- is cleaner
+ local pi = parsed[1]
+ if pi.axis == "auto-child" then
+ if false then
+ add_comment(parsed, "auto-child replaced by auto-descendant-or-self")
+ parsed[1] = register_auto_descendant_or_self
+ else
+ add_comment(parsed, "auto-child replaced by auto-descendant")
+ parsed[1] = register_auto_descendant
+ end
+ elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then
+                        add_comment(parsed, "initial-child removed") -- we could also make it an auto-self
+ remove(parsed,1)
+ end
+ local np = #parsed -- can have changed
+ if np > 1 then
+ local pnp = parsed[np]
+ if pnp.kind == "nodes" and pnp.nodetest == true then
+ local nodes = pnp.nodes
+ if nodes[1] == true and nodes[2] == false and nodes[3] == false then
+ add_comment(parsed, "redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
end
else
- t[#t+1] = tx or ""
+ parsed = { pattern = pattern }
+ end
+ cache[pattern] = parsed
+ if trace_lparse and not trace_lprofile then
+ lshow(parsed)
end
- else
- t[#t+1] = ""
end
- end)
- return t
+ return parsed
+ end
end
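--[[ldx--
<p>Editor's sketch, not part of the patch: parsing returns (and caches) a table of
step records that the apply functions below walk over; the <t>lshow</t> helper
defined above serializes such a table for inspection:</p>

<typing>
local parsed = parse_pattern("a/b[@n='2']/text()")
-- parsed.pattern  : the original string
-- parsed[1]       : axis of the first step, rewritten from auto-child to auto-descendant
-- parsed[#parsed] : the registered "text" finalizer record
lshow(parsed)
</typing>
--ldx]]--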
-function xml.collect_tags(root, pattern, nonamespace)
- local t = { }
- xml.traverse(root, xml.lpath(pattern), function(r,d,k)
- local dk = d and d[k]
- if dk and type(dk) == "table" then
- local ns, tg = e.ns, e.tg
- if nonamespace then
- t[#t+1] = tg -- if needed we can return an extra table
- elseif ns == "" then
- t[#t+1] = tg
- else
- t[#t+1] = ns .. ":" .. tg
+-- we can move all calls inline and then merge the trace back
+-- technically we can combine axis and the next nodes, which is
+-- what we did before, but this is a bit cleaner (although slower);
+-- interestingly, going inline does not buy that much speed anyway
+--
+-- beware: we need to return a collection even when we filter
+-- else the (simple) cache gets messed up
+
+-- caching found lookups does not save that much (at most .1 sec on an 8 sec run)
+-- and it also messes up finalizers
+
+-- watch out: when there is a finalizer it is always called, as a
+-- finalizer may return (or do) something even when there is no
+-- match; an example of this is count()
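--[[ldx--
<p>Editor's sketch, not part of the patch, illustrating the remark above: assuming a
<t>count</t> finalizer is registered for the <t>xml</t> protocol (as happens elsewhere
in the <l n='xml'/> code), the finalizer still receives the empty collection when
nothing matches, so the answer is a proper zero instead of <t>nil</t>
(<t>parse_apply</t> is defined a bit further down, <t>root</t> is hypothetical):</p>

<typing>
local n = parse_apply({ root },"a/nosuchelement/count()") -- 0, not nil
</typing>
--ldx]]--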
+
+local profiled = { } xml.profiled = profiled
+
+local function profiled_apply(list,parsed,nofparsed,order)
+ local p = profiled[parsed.pattern]
+ if p then
+ p.tested = p.tested + 1
+ else
+ p = { tested = 1, matched = 0, finalized = 0 }
+ profiled[parsed.pattern] = p
+ end
+ local collected = list
+ for i=1,nofparsed do
+ local pi = parsed[i]
+ local kind = pi.kind
+ if kind == "axis" then
+ collected = apply_axis[pi.axis](collected)
+ elseif kind == "nodes" then
+ collected = apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind == "expression" then
+ collected = apply_expression(collected,pi.evaluator,order)
+ elseif kind == "finalizer" then
+ collected = pi.finalizer(collected)
+ p.matched = p.matched + 1
+ p.finalized = p.finalized + 1
+ return collected
+ end
+ if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ p.finalized = p.finalized + 1
+ return collected
end
+ return nil
end
- end)
- return #t > 0 and {}
+ end
+ if collected then
+ p.matched = p.matched + 1
+ end
+ return collected
+end
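--[[ldx--
<p>Editor's sketch, not part of the patch: with the <t>xml.profile</t> tracker
enabled the table above fills up with per-pattern counters, which can afterwards be
reported along these lines:</p>

<typing>
for pattern, p in next, xml.profiled do
    logs.report("lpath","%5i tested, %5i matched, %5i finalized : %s",
        p.tested,p.matched,p.finalized,pattern)
end
</typing>
--ldx]]--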
+
+local function traced_apply(list,parsed,nofparsed,order)
+ if trace_lparse then
+ lshow(parsed)
+ end
+ logs.report("lpath", "collecting : %s",parsed.pattern)
+ logs.report("lpath", " root tags : %s",tagstostring(list))
+ logs.report("lpath", " order : %s",order or "unset")
+ local collected = list
+ for i=1,nofparsed do
+ local pi = parsed[i]
+ local kind = pi.kind
+ if kind == "axis" then
+ collected = apply_axis[pi.axis](collected)
+ logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ elseif kind == "nodes" then
+ collected = apply_nodes(collected,pi.nodetest,pi.nodes)
+ logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ elseif kind == "expression" then
+ collected = apply_expression(collected,pi.evaluator,order)
+ logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ elseif kind == "finalizer" then
+ collected = pi.finalizer(collected)
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ return collected
+ end
+ if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
+ return nil
+ end
+ end
+ return collected
end
---[[ldx--
-<p>Often using an iterator looks nicer in the code than passing handler
-functions. The <l n='lua'/> book describes how to use coroutines for that
-purpose (<url href='http://www.lua.org/pil/9.3.html'/>). This permits
-code like:</p>
-
-<typing>
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
- print(d[k])
+local function normal_apply(list,parsed,nofparsed,order)
+ local collected = list
+ for i=1,nofparsed do
+ local pi = parsed[i]
+ local kind = pi.kind
+ if kind == "axis" then
+ local axis = pi.axis
+ if axis ~= "self" then
+ collected = apply_axis[axis](collected)
+ end
+ elseif kind == "nodes" then
+ collected = apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind == "expression" then
+ collected = apply_expression(collected,pi.evaluator,order)
+ elseif kind == "finalizer" then
+ return pi.finalizer(collected)
+ end
+ if not collected or #collected == 0 then
+ local pf = i < nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected) -- can be anything
+ end
+ return nil
+ end
+ end
+ return collected
end
-</typing>
-
-<p>Which will print all the titles in the document. The iterator variant takes
-1.5 times the runtime of the function variant which is due to the overhead in
-creating the wrapper. So, instead of:</p>
-<typing>
-function xml.filters.first(root,pattern)
- for rt,dt,dk in xml.elements(root,pattern)
- return dt and dt[dk], rt, dt, dk
+local function parse_apply(list,pattern)
+ -- we avoid an extra call
+ local parsed = cache[pattern]
+ if parsed then
+ lpathcalls = lpathcalls + 1
+ lpathcached = lpathcached + 1
+ elseif type(pattern) == "table" then
+ lpathcalls = lpathcalls + 1
+ parsed = pattern
+ else
+ parsed = parse_pattern(pattern) or pattern
+ end
+ if not parsed then
+ return
+ end
+ local nofparsed = #parsed
+ if nofparsed == 0 then
+ return -- something is wrong
+ end
+ local one = list[1]
+ if not one then
+ return -- something is wrong
+ elseif not trace_lpath then
+ return normal_apply(list,parsed,nofparsed,one.mi)
+ elseif trace_lprofile then
+ return profiled_apply(list,parsed,nofparsed,one.mi)
+ else
+ return traced_apply(list,parsed,nofparsed,one.mi)
end
- return nil, nil, nil, nil
end
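--[[ldx--
<p>Editor's sketch, not part of the patch: <t>parse_apply</t> is the central entry
point used by the frontends below; it takes a list of start elements and a pattern
(a string or an already parsed table) and returns a collection, or whatever a
trailing finalizer produces (the document is made up):</p>

<typing>
local root = xml.convert("<a><b n='1'>x</b><b n='2'>y</b></a>")
local hits = parse_apply({ root },"a/b[@n='2']")
print(hits and #hits) -- 1
</typing>
--ldx]]--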
-</typing>
-<p>We use the function variants in the filters.</p>
---ldx]]--
-
-local wrap, yield = coroutine.wrap, coroutine.yield
+-- internal (parsed)
-function xml.elements(root,pattern,reverse)
- return wrap(function() traverse(root, lpath(pattern), yield, reverse) end)
+expressions.child = function(e,pattern)
+ return parse_apply({ e },pattern) -- todo: cache
end
-
-function xml.elements_only(root,pattern,reverse)
- return wrap(function() traverse(root, lpath(pattern), function(r,d,k) yield(d[k]) end, reverse) end)
+expressions.count = function(e,pattern)
+ local collected = parse_apply({ e },pattern) -- todo: cache
+ return (collected and #collected) or 0
end
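--[[ldx--
<p>Editor's sketch, not part of the patch: these two make nested lookups inside an
expression possible, for instance keeping only the chapters that contain at least
one section (element names and <t>handle</t> are made up):</p>

<typing>
xml.selection(root,"chapter[count(section) != 0]",handle)
</typing>
--ldx]]--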
-function xml.each_element(root, pattern, handle, reverse)
- local ok
- traverse(root, lpath(pattern), function(r,d,k) ok = true handle(r,d,k) end, reverse)
- return ok
+-- external
+
+expressions.oneof = function(s,...) -- slow
+ local t = {...} for i=1,#t do if s == t[i] then return true end end return false
+end
+expressions.error = function(str)
+ xml.error_handler("unknown function in lpath expression",tostring(str or "?"))
+ return false
+end
+expressions.undefined = function(s)
+ return s == nil
end
-function xml.process_elements(root, pattern, handle)
- traverse(root, lpath(pattern), function(r,d,k)
- local dkdt = d[k].dt
- if dkdt then
- for i=1,#dkdt do
- local v = dkdt[i]
- if v.tg then handle(v) end
- end
- end
- end)
+expressions.quit = function(s)
+ if s or s == nil then
+ quit_expression = true
+ end
+ return true
end
-function xml.process_attributes(root, pattern, handle)
- traverse(root, lpath(pattern), function(r,d,k)
- local ek = d[k]
- local a = ek.at or { }
- handle(a)
- if next(a) then -- next is faster than type (and >0 test)
- ek.at = a
- else
- ek.at = nil
- end
- end)
+expressions.print = function(...)
+ print(...)
+ return true
end
---[[ldx--
-<p>We've now arrives at the functions that manipulate the tree.</p>
---ldx]]--
+expressions.contains = find
+expressions.find = find
+expressions.upper = upper
+expressions.lower = lower
+expressions.number = tonumber
+expressions.boolean = toboolean
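+
+-- These helpers are meant to be called inside lpath predicates; a few commented
+-- examples, in the spirit of the test snippets that used to live further down in
+-- this file (x being a parsed <a> tree with <b n='..'> children):
+
+--~ xml.filter(x,"b[number(@n)>2 and number(@n)<6]")
+--~ xml.filter(x,"b[find(text(),'ALSO')]")
+--~ xml.filter(x,"b[text() == upper('ok')]")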
-function xml.inject_element(root, pattern, element, prepend)
- if root and element then
- local matches, collect = { }, nil
- if type(element) == "string" then
- element = convert(element,true)
- end
- if element then
- collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end
- traverse(root, lpath(pattern), collect)
- for i=1,#matches do
- local m = matches[i]
- local r, d, k, element, edt = m[1], m[2], m[3], m[4], nil
- if element.ri then
- element = element.dt[element.ri].dt
- else
- element = element.dt
- end
- if r.ri then
- edt = r.dt[r.ri].dt
- else
- edt = d and d[k] and d[k].dt
- end
- if edt then
- local be, af
- if prepend then
- be, af = xml.copy(element), edt
- else
- be, af = edt, xml.copy(element)
- end
- for i=1,#af do
- be[#be+1] = af[i]
- end
- if r.ri then
- r.dt[r.ri].dt = be
- else
- d[k].dt = be
- end
- else
- -- r.dt = element.dt -- todo
- end
- end
+-- user interface
+
+local function traverse(root,pattern,handle)
+ logs.report("xml","use 'xml.selection' instead for '%s'",pattern)
+ local collected = parse_apply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local r = e.__p__
+ handle(r,r.dt,e.ni)
end
end
end
--- todo: copy !
-
-function xml.insert_element(root, pattern, element, before) -- todo: element als functie
- if root and element then
- if pattern == "/" then
- xml.inject_element(root, pattern, element, before)
- else
- local matches, collect = { }, nil
- if type(element) == "string" then
- element = convert(element,true)
- end
- if element and element.ri then
- element = element.dt[element.ri]
- end
- if element then
- collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end
- traverse(root, lpath(pattern), collect)
- for i=#matches,1,-1 do
- local m = matches[i]
- local r, d, k, element = m[1], m[2], m[3], m[4]
- if not before then k = k + 1 end
- if element.tg then
- insert(d,k,element) -- untested
---~ elseif element.dt then
---~ for _,v in ipairs(element.dt) do -- i added
---~ insert(d,k,v)
---~ k = k + 1
---~ end
---~ end
- else
- local edt = element.dt
- if edt then
- for i=1,#edt do
- insert(d,k,edt[i])
- k = k + 1
- end
- end
- end
- end
+local function selection(root,pattern,handle)
+ local collected = parse_apply({ root },pattern)
+ if collected then
+ if handle then
+ for c=1,#collected do
+ handle(collected[c])
end
+ else
+ return collected
end
end
end
-xml.insert_element_after = xml.insert_element
-xml.insert_element_before = function(r,p,e) xml.insert_element(r,p,e,true) end
-xml.inject_element_after = xml.inject_element
-xml.inject_element_before = function(r,p,e) xml.inject_element(r,p,e,true) end
+xml.parse_parser = parser
+xml.parse_pattern = parse_pattern
+xml.parse_apply = parse_apply
+xml.traverse = traverse -- old method, r, d, k
+xml.selection = selection -- new method, simple handle
-function xml.delete_element(root, pattern)
- local matches, deleted = { }, { }
- local collect = function(r,d,k) matches[#matches+1] = { r, d, k } end
- traverse(root, lpath(pattern), collect)
- for i=#matches,1,-1 do
- local m = matches[i]
- deleted[#deleted+1] = remove(m[2],m[3])
- end
- return deleted
+local lpath = parse_pattern
+
+xml.lpath = lpath
+
+function xml.cached_patterns()
+ return cache
end
-function xml.replace_element(root, pattern, element)
- if type(element) == "string" then
- element = convert(element,true)
- end
- if element and element.ri then
- element = element.dt[element.ri]
- end
- if element then
- traverse(root, lpath(pattern), function(rm, d, k)
- d[k] = element.dt -- maybe not clever enough
- end)
+-- generic function finalizer (independent namespace)
+
+local function dofunction(collected,fnc)
+ if collected then
+ local f = functions[fnc]
+ if f then
+ for c=1,#collected do
+ f(collected[c])
+ end
+ else
+ logs.report("xml","unknown function '%s'",fnc)
+ end
end
end
-local function load_data(name) -- == io.loaddata
- local f, data = io.open(name), ""
- if f then
- data = f:read("*all",'b') -- 'b' ?
- f:close()
- end
- return data
+xml.finalizers.xml["function"] = dofunction
+xml.finalizers.tex["function"] = dofunction
+
+-- functions
+
+expressions.text = function(e,n)
+ local rdt = e.__p__.dt
+ return (rdt and rdt[n]) or ""
end
-function xml.include(xmldata,pattern,attribute,recursive,loaddata)
- -- parse="text" (default: xml), encoding="" (todo)
- -- attribute = attribute or 'href'
- pattern = pattern or 'include'
- loaddata = loaddata or load_data
- local function include(r,d,k)
- local ek, name = d[k], nil
- if not attribute or attribute == "" then
- local ekdt = ek.dt
- name = (type(ekdt) == "table" and ekdt[1]) or ekdt
- end
- if not name then
- if ek.at then
- for a in gmatch(attribute or "href","([^|]+)") do
- name = ek.at[a]
- if name then break end
+expressions.name = function(e,n) -- ns + tg
+ local found = false
+ n = tonumber(n) or 0
+ if n == 0 then
+ found = type(e) == "table" and e
+ elseif n < 0 then
+ local d, k = e.__p__.dt, e.ni
+ for i=k-1,1,-1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == -1 then
+ found = di
+ break
+ else
+ n = n + 1
end
end
end
- local data = (name and name ~= "" and loaddata(name)) or ""
- if data == "" then
- xml.empty(d,k)
- elseif ek.at["parse"] == "text" then -- for the moment hard coded
- d[k] = xml.escaped(data)
- else
- local xi = xml.convert(data)
- if not xi then
- xml.empty(d,k)
- else
- if recursive then
- xml.include(xi,pattern,attribute,recursive,loaddata)
+ else
+ local d, k = e.__p__.dt, e.ni
+ for i=k+1,#d,1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == 1 then
+ found = di
+ break
+ else
+ n = n - 1
end
- xml.assign(d,k,xi)
end
end
end
- xml.each_element(xmldata, pattern, include)
+ if found then
+ local ns, tg = found.rn or found.ns or "", found.tg
+ if ns ~= "" then
+ return ns .. ":" .. tg
+ else
+ return tg
+ end
+ else
+ return ""
+ end
end
-function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space !
- traverse(root, lpath(pattern), function(r,d,k)
- local dkdt = d[k].dt
- if dkdt then -- can be optimized
- local t = { }
- for i=1,#dkdt do
- local str = dkdt[i]
- if type(str) == "string" then
-
- if str == "" then
- -- stripped
+expressions.tag = function(e,n) -- only tg
+ if not e then
+ return ""
+ else
+ local found = false
+ n = tonumber(n) or 0
+ if n == 0 then
+ found = (type(e) == "table") and e -- seems to fail
+ elseif n < 0 then
+ local d, k = e.__p__.dt, e.ni
+ for i=k-1,1,-1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == -1 then
+ found = di
+ break
else
- if nolines then
- str = gsub(str,"[ \n\r\t]+"," ")
- end
- if str == "" then
- -- stripped
- else
- t[#t+1] = str
- end
+ n = n + 1
end
- else
- t[#t+1] = str
end
end
- d[k].dt = t
- end
- end)
-end
-
-local function rename_space(root, oldspace, newspace) -- fast variant
- local ndt = #root.dt
- for i=1,ndt or 0 do
- local e = root[i]
- if type(e) == "table" then
- if e.ns == oldspace then
- e.ns = newspace
- if e.rn then
- e.rn = newspace
+ else
+ local d, k = e.__p__.dt, e.ni
+ for i=k+1,#d,1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == 1 then
+ found = di
+ break
+ else
+ n = n - 1
+ end
end
end
- local edt = e.dt
- if edt then
- rename_space(edt, oldspace, newspace)
- end
end
+ return (found and found.tg) or ""
end
end
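+
+-- The name() and tag() helpers inspect preceding siblings for negative n and
+-- following siblings for positive n; the old commented tests illustrate the idea
+-- (x as above):
+
+--~ xml.xshow(xml.first(x,"b[tag(2) == 'x']"))
+--~ xml.xshow(xml.first(x,"b[tag(-1) == 'x']"))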
-xml.rename_space = rename_space
+--[[ldx--
+<p>This is the main filter function. It returns whatever is asked for.</p>
+--ldx]]--
-function xml.remap_tag(root, pattern, newtg)
- traverse(root, lpath(pattern), function(r,d,k)
- d[k].tg = newtg
- end)
+function xml.filter(root,pattern) -- no longer funny attribute handling here
+ return parse_apply({ root },pattern)
end
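+
+-- What comes back depends on the pattern: a list of matched elements, or whatever a
+-- trailing finalizer produces. A commented example, matching the old tests in this file:
+
+--~ print(xml.filter(x,"b/tag(2)"))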
-function xml.remap_namespace(root, pattern, newns)
- traverse(root, lpath(pattern), function(r,d,k)
- d[k].ns = newns
- end)
+
+--[[ldx--
+<p>Often using an iterator looks nicer in the code than passing handler
+functions. The <l n='lua'/> book describes how to use coroutines for that
+purpose (<url href='http://www.lua.org/pil/9.3.html'/>). This permits
+code like:</p>
+
+<typing>
+for r, d, k in xml.elements(xml.load('text.xml'),"title") do
+ print(d[k]) -- old method
end
-function xml.check_namespace(root, pattern, newns)
- traverse(root, lpath(pattern), function(r,d,k)
- local dk = d[k]
- if (not dk.rn or dk.rn == "") and dk.ns == "" then
- dk.rn = newns
- end
- end)
+for e in xml.collected(xml.load('text.xml'),"title") do
+ print(e) -- new one
end
-function xml.remap_name(root, pattern, newtg, newns, newrn)
- traverse(root, lpath(pattern), function(r,d,k)
- local dk = d[k]
- dk.tg = newtg
- dk.ns = newns
- dk.rn = newrn
- end)
+</typing>
+--ldx]]--
+
+local wrap, yield = coroutine.wrap, coroutine.yield
+
+function xml.elements(root,pattern,reverse) -- r, d, k
+ local collected = parse_apply({ root },pattern)
+ if collected then
+ if reverse then
+ return wrap(function() for c=#collected,1,-1 do
+ local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
+ end end)
+ else
+ return wrap(function() for c=1,#collected do
+ local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
+ end end)
+ end
+ end
+ return wrap(function() end)
end
-function xml.filters.found(root,pattern,check_content)
- local found = false
- traverse(root, lpath(pattern), function(r,d,k)
- if check_content then
- local dk = d and d[k]
- found = dk and dk.dt and next(dk.dt) and true
+function xml.collected(root,pattern,reverse) -- e
+ local collected = parse_apply({ root },pattern)
+ if collected then
+ if reverse then
+ return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
else
- found = true
+ return wrap(function() for c=1,#collected do yield(collected[c]) end end)
end
- return true
- end)
- return found
+ end
+ return wrap(function() end)
end
---[[ldx--
-<p>Here are a few synonyms.</p>
---ldx]]--
-xml.filters.position = xml.filters.index
+end -- of closure
-xml.count = xml.filters.count
-xml.index = xml.filters.index
-xml.position = xml.filters.index
-xml.first = xml.filters.first
-xml.last = xml.filters.last
-xml.found = xml.filters.found
+do -- create closure to overcome 200 locals limit
-xml.each = xml.each_element
-xml.process = xml.process_element
-xml.strip = xml.strip_whitespace
-xml.collect = xml.collect_elements
-xml.all = xml.collect_elements
+if not modules then modules = { } end modules ['lxml-mis'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
-xml.insert = xml.insert_element_after
-xml.inject = xml.inject_element_after
-xml.after = xml.insert_element_after
-xml.before = xml.insert_element_before
-xml.delete = xml.delete_element
-xml.replace = xml.replace_element
+local concat = table.concat
+local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
+local format, gsub, match = string.format, string.gsub, string.match
+local lpegmatch = lpeg.match
--[[ldx--
-<p>The following helper functions best belong to the <t>lmxl-ini</t>
+<p>The following helper functions best belong to the <t>lxml-ini</t>
 module. Some are here because we need them in the <t>mk</t>
document and other manuals, others came up when playing with
this module. Since this module is also used in <l n='mtxrun'/> we've
 put them here instead of loading more modules there when needed.</p>
--ldx]]--
-function xml.gsub(t,old,new)
+local function xmlgsub(t,old,new) -- will be replaced
local dt = t.dt
if dt then
for k=1,#dt do
@@ -5069,28 +6366,26 @@ function xml.gsub(t,old,new)
if type(v) == "string" then
dt[k] = gsub(v,old,new)
else
- xml.gsub(v,old,new)
+ xmlgsub(v,old,new)
end
end
end
end
+--~ xml.gsub = xmlgsub
+
function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
- if d and k and d[k-1] and type(d[k-1]) == "string" then
- local s = d[k-1]:match("\n(%s+)")
- xml.gsub(dk,"\n"..string.rep(" ",#s),"\n")
+ if d and k then
+ local dkm = d[k-1]
+ if dkm and type(dkm) == "string" then
+ local s = match(dkm,"\n(%s+)")
+ xmlgsub(dk,"\n"..rep(" ",#s),"\n")
+ end
end
end
-function xml.serialize_path(root,lpath,handle)
- local dk, r, d, k = xml.first(root,lpath)
- dk = xml.copy(dk)
- xml.strip_leading_spaces(dk,d,k)
- xml.serialize(dk,handle)
-end
-
--~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
+--~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
--~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
--~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
@@ -5114,8 +6409,6 @@ local escaped = Cs(normal * (special * normal)^0)
-- 100 * 1000 * "oeps&lt; oeps&gt; oeps&amp;" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
--- unescaped = Cs((S("&lt;")/"<" + S("&gt;")/">" + S("&amp;")/"&" + 1)^0)
--- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0)
local normal = (1 - S"&")^0
local special = P("&lt;")/"<" + P("&gt;")/">" + P("&amp;")/"&"
local unescaped = Cs(normal * (special * normal)^0)
@@ -5124,84 +6417,32 @@ local unescaped = Cs(normal * (special * normal)^0)
local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
-function xml.escaped (str) return escaped :match(str) end
-function xml.unescaped(str) return unescaped:match(str) end
-function xml.cleansed (str) return cleansed :match(str) end
+xml.escaped_pattern = escaped
+xml.unescaped_pattern = unescaped
+xml.cleansed_pattern = cleansed
-function xml.join(t,separator,lastseparator)
- if #t > 0 then
- local result = { }
- for k,v in pairs(t) do
- result[k] = xml.tostring(v)
- end
- if lastseparator then
- return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. result[#result]
- else
- return concat(result,separator)
+function xml.escaped (str) return lpegmatch(escaped,str) end
+function xml.unescaped(str) return lpegmatch(unescaped,str) end
+function xml.cleansed (str) return lpegmatch(cleansed,str) end
+
+-- this might move
+
+function xml.fillin(root,pattern,str,check)
+ local e = xml.first(root,pattern)
+ if e then
+ local n = #e.dt
+ if not check or n == 0 or (n == 1 and e.dt[1] == "") then
+ e.dt = { str }
end
- else
- return ""
end
end
-function xml.statistics()
- return {
- lpathcalls = lpathcalls,
- lpathcached = lpathcached,
- }
-end
-
--- xml.set_text_cleanup(xml.show_text_entities)
--- xml.set_text_cleanup(xml.resolve_text_entities)
-
---~ xml.lshow("/../../../a/(b|c)[@d='e']/f")
---~ xml.lshow("/../../../a/!(b|c)[@d='e']/f")
---~ xml.lshow("/../../../a/!b[@d!='e']/f")
-
---~ x = xml.convert([[
---~ <a>
---~ <b n='01'>01</b>
---~ <b n='02'>02</b>
---~ <b n='03'>03</b>
---~ <b n='04'>OK</b>
---~ <b n='05'>05</b>
---~ <b n='06'>06</b>
---~ <b n='07'>ALSO OK</b>
---~ </a>
---~ ]])
-
---~ xml.settrace("lpath",true)
-
---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == 'ok']"))
---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == upper('ok')]"))
---~ xml.xshow(xml.first(x,"b[@n=='03' or @n=='08']"))
---~ xml.xshow(xml.all (x,"b[number(@n)>2 and number(@n)<6]"))
---~ xml.xshow(xml.first(x,"b[find(text(),'ALSO')]"))
-
---~ str = [[
---~ <?xml version="1.0" encoding="utf-8"?>
---~ <story line='mojca'>
---~ <windows>my secret</mouse>
---~ </story>
---~ ]]
-
---~ x = xml.convert([[
---~ <a><b n='01'>01</b><b n='02'>02</b><x>xx</x><b n='03'>03</b><b n='04'>OK</b></a>
---~ ]])
---~ xml.xshow(xml.first(x,"b[tag(2) == 'x']"))
---~ xml.xshow(xml.first(x,"b[tag(1) == 'x']"))
---~ xml.xshow(xml.first(x,"b[tag(-1) == 'x']"))
---~ xml.xshow(xml.first(x,"b[tag(-2) == 'x']"))
-
---~ print(xml.filter(x,"b/tag(2)"))
---~ print(xml.filter(x,"b/tag(1)"))
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-ent'] = {
+if not modules then modules = { } end modules ['lxml-aux'] = {
version = 1.001,
comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -5209,457 +6450,836 @@ if not modules then modules = { } end modules ['lxml-ent'] = {
license = "see context related readme files"
}
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, gsub, find = string.format, string.gsub, string.find
-local utfchar = unicode.utf8.char
+-- not all functions here make sense anymore but we keep them for
+-- compatibility reasons
---[[ldx--
-<p>We provide (at least here) two entity handlers. The more extensive
-resolver consults a hash first, tries to convert to <l n='utf'/> next,
-and finaly calls a handler when defines. When this all fails, the
-original entity is returned.</p>
---ldx]]--
+local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
-xml.entities = xml.entities or { } -- xml.entity_handler == function
+local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name
+local xmlinheritedconvert = xml.inheritedconvert
-function xml.entity_handler(e)
- return format("[%s]",e)
-end
+local type = type
+local insert, remove = table.insert, table.remove
+local gmatch, gsub = string.gmatch, string.gsub
-local function toutf(s)
- return utfchar(tonumber(s,16))
+local function report(what,pattern,c,e)
+ logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
-local function utfize(root)
- local d = root.dt
- for k=1,#d do
- local dk = d[k]
- if type(dk) == "string" then
- -- test prevents copying if no match
- if find(dk,"&#x.-;") then
- d[k] = gsub(dk,"&#x(.-);",toutf)
+local function withelements(e,handle,depth)
+ if e and handle then
+ local edt = e.dt
+ if edt then
+ depth = depth or 0
+ for i=1,#edt do
+ local e = edt[i]
+ if type(e) == "table" then
+ handle(e,depth)
+ withelements(e,handle,depth+1)
+ end
end
- else
- utfize(dk)
end
end
end
-xml.utfize = utfize
+xml.withelements = withelements
-local function resolve(e) -- hex encoded always first, just to avoid mkii fallbacks
- if find(e,"^#x") then
- return utfchar(tonumber(e:sub(3),16))
- elseif find(e,"^#") then
- return utfchar(tonumber(e:sub(2)))
- else
- local ee = xml.entities[e] -- we cannot shortcut this one (is reloaded)
- if ee then
- return ee
- else
- local h = xml.entity_handler
- return (h and h(e)) or "&" .. e .. ";"
+function xml.withelement(e,n,handle) -- slow
+ if e and n ~= 0 and handle then
+ local edt = e.dt
+ if edt then
+ if n > 0 then
+ for i=1,#edt do
+ local ei = edt[i]
+ if type(ei) == "table" then
+ if n == 1 then
+ handle(ei)
+ return
+ else
+ n = n - 1
+ end
+ end
+ end
+ elseif n < 0 then
+ for i=#edt,1,-1 do
+ local ei = edt[i]
+ if type(ei) == "table" then
+ if n == -1 then
+ handle(ei)
+ return
+ else
+ n = n + 1
+ end
+ end
+ end
+ end
end
end
end
-local function resolve_entities(root)
- if not root.special or root.tg == "@rt@" then
- local d = root.dt
- for k=1,#d do
- local dk = d[k]
- if type(dk) == "string" then
- if find(dk,"&.-;") then
- d[k] = gsub(dk,"&(.-);",resolve)
- end
- else
- resolve_entities(dk)
+xml.elements_only = xml.collected
+
+function xml.each_element(root,pattern,handle,reverse)
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
end
end
+ return collected
end
end
-xml.resolve_entities = resolve_entities
+xml.process_elements = xml.each_element
-function xml.utfize_text(str)
- if find(str,"&#") then
- return (gsub(str,"&#x(.-);",toutf))
- else
- return str
+function xml.process_attributes(root,pattern,handle)
+ local collected = xmlparseapply({ root },pattern)
+ if collected and handle then
+ for c=1,#collected do
+ handle(collected[c].at)
+ end
end
+ return collected
end
-function xml.resolve_text_entities(str) -- maybe an lpeg. maybe resolve inline
- if find(str,"&") then
- return (gsub(str,"&(.-);",resolve))
- else
- return str
- end
+--[[ldx--
+<p>The following functions collect elements and texts.</p>
+--ldx]]--
+
+-- are these still needed -> lxml-cmp.lua
+
+function xml.collect_elements(root, pattern)
+ return xmlparseapply({ root },pattern)
end
-function xml.show_text_entities(str)
- if find(str,"&") then
- return (gsub(str,"&(.-);","[%1]"))
- else
- return str
+function xml.collect_texts(root, pattern, flatten) -- todo: variant with handle
+ local collected = xmlparseapply({ root },pattern)
+ if collected and flatten then
+ local xmltostring = xml.tostring
+ for c=1,#collected do
+ collected[c] = xmltostring(collected[c].dt)
+ end
end
+ return collected or { }
end
--- experimental, this will be done differently
-
-function xml.merge_entities(root)
- local documententities = root.entities
- local allentities = xml.entities
- if documententities then
- for k, v in next, documententities do
- allentities[k] = v
+function xml.collect_tags(root, pattern, nonamespace)
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ local t = { }
+ for c=1,#collected do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ if nonamespace then
+ t[#t+1] = tg
+ elseif ns == "" then
+ t[#t+1] = tg
+ else
+ t[#t+1] = ns .. ":" .. tg
+ end
end
+ return t
end
end
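+
+-- Commented usage sketch (root being a placeholder for a parsed tree); with the
+-- flatten flag set, collect_texts returns strings instead of element data tables:
+
+--~ local elements = xml.collect_elements(root,"b")
+--~ local texts    = xml.collect_texts(root,"b",true)
+--~ local tags     = xml.collect_tags(root,"b",true)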
+--[[ldx--
+<p>We've now arrived at the functions that manipulate the tree.</p>
+--ldx]]--
-end -- of closure
+local no_root = { no_root = true }
-do -- create closure to overcome 200 locals limit
+function xml.redo_ni(d)
+ for k=1,#d do
+ local dk = d[k]
+ if type(dk) == "table" then
+ dk.ni = k
+ end
+ end
+end
-if not modules then modules = { } end modules ['lxml-mis'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+local function xmltoelement(whatever,root)
+ if not whatever then
+ return nil
+ end
+ local element
+ if type(whatever) == "string" then
+ element = xmlinheritedconvert(whatever,root)
+ else
+ element = whatever -- we assume a table
+ end
+ if element.error then
+ return whatever -- string
+ end
+ if element then
+ --~ if element.ri then
+ --~ element = element.dt[element.ri].dt
+ --~ else
+ --~ element = element.dt
+ --~ end
+ end
+ return element
+end
-local concat = table.concat
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, gsub = string.format, string.gsub
+xml.toelement = xmltoelement
---[[ldx--
-<p>The following helper functions best belong to the <t>lmxl-ini</t>
-module. Some are here because we need then in the <t>mk</t>
-document and other manuals, others came up when playing with
-this module. Since this module is also used in <l n='mtxrun'/> we've
-put them here instead of loading mode modules there then needed.</p>
---ldx]]--
+local function copiedelement(element,newparent)
+ if type(element) == "string" then
+ return element
+ else
+ element = xmlcopy(element).dt
+ if newparent and type(element) == "table" then
+ element.__p__ = newparent
+ end
+ return element
+ end
+end
-function xml.gsub(t,old,new)
- local dt = t.dt
- if dt then
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "string" then
- dt[k] = gsub(v,old,new)
- else
- xml.gsub(v,old,new)
+function xml.delete_element(root,pattern)
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ xml.redo_ni(d) -- can be made faster and inlined
end
end
end
end
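+
+-- Commented sketch (root and the pattern are placeholders): all matches are removed
+-- and the sibling indices are renumbered afterwards by redo_ni.
+
+--~ xml.delete_element(root,"b[@n='02']")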
-function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
- if d and k and d[k-1] and type(d[k-1]) == "string" then
- local s = d[k-1]:match("\n(%s+)")
- xml.gsub(dk,"\n"..string.rep(" ",#s),"\n")
+function xml.replace_element(root,pattern,whatever)
+ local element = root and xmltoelement(whatever,root)
+ local collected = element and xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('replacing',pattern,c,e)
+ end
+ local d = p.dt
+ d[e.ni] = copiedelement(element,p)
+ xml.redo_ni(d) -- probably not needed
+ end
+ end
end
end
-function xml.serialize_path(root,lpath,handle)
- local dk, r, d, k = xml.first(root,lpath)
- dk = xml.copy(dk)
- xml.strip_leading_spaces(dk,d,k)
- xml.serialize(dk,handle)
+local function inject_element(root,pattern,whatever,prepend)
+ local element = root and xmltoelement(whatever,root)
+ local collected = element and xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
+ end
+ for i=1,#af do
+ be[#be+1] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ xml.redo_ni(d)
+ end
+ end
+ end
end
---~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
+local function insert_element(root,pattern,whatever,before) -- todo: element als functie
+ local element = root and xmltoelement(whatever,root)
+ local collected = element and xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ xml.redo_ni(d)
+ end
+ end
+end
---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
+xml.insert_element = insert_element
+xml.insert_element_after = insert_element
+xml.insert_element_before = function(r,p,e) insert_element(r,p,e,true) end
+xml.inject_element = inject_element
+xml.inject_element_after = inject_element
+xml.inject_element_before = function(r,p,e) inject_element(r,p,e,true) end
-local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
+local function include(xmldata,pattern,attribute,recursive,loaddata)
+ -- parse="text" (default: xml), encoding="" (todo)
+ -- attribute = attribute or 'href'
+ pattern = pattern or 'include'
+ loaddata = loaddata or io.loaddata
+ local collected = xmlparseapply({ xmldata },pattern)
+ if collected then
+ for c=1,#collected do
+ local ek = collected[c]
+ local name = nil
+ local ekdt = ek.dt
+ local ekat = ek.at
+ local epdt = ek.__p__.dt
+ if not attribute or attribute == "" then
+                name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
+ end
+ if not name then
+ for a in gmatch(attribute or "href","([^|]+)") do
+ name = ekat[a]
+ if name then break end
+ end
+ end
+ local data = (name and name ~= "" and loaddata(name)) or ""
+ if data == "" then
+ epdt[ek.ni] = "" -- xml.empty(d,k)
+ elseif ekat["parse"] == "text" then
+ -- for the moment hard coded
+ epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
+ else
+--~ local settings = xmldata.settings
+--~ settings.parent_root = xmldata -- to be tested
+--~ local xi = xmlconvert(data,settings)
+ local xi = xmlinheritedconvert(data,xmldata)
+ if not xi then
+ epdt[ek.ni] = "" -- xml.empty(d,k)
+ else
+ if recursive then
+ include(xi,pattern,attribute,recursive,loaddata)
+ end
+ epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
+ end
+ end
+ end
+ end
+end
--- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
---
--- 1021:0335:0287:0247
+xml.include = include
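+
+-- Commented sketch: resolve include elements in place, recursively; the arguments
+-- shown here are the built-in defaults (pattern 'include', attribute 'href').
+
+--~ xml.include(root,"include","href",true)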
--- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
---
--- 1559:0257:0288:0190 (last one suggested by roberto)
+--~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away
+--~ local collected = xmlparseapply({ xmldata },pattern)
+--~ if collected then
+--~ local xmltostring = xml.tostring
+--~ for c=1,#collected do
+--~ local e = collected[c]
+--~ local data = manipulator(xmltostring(e))
+--~ if data == "" then
+--~ epdt[e.ni] = ""
+--~ else
+--~ local xi = xmlinheritedconvert(data,xmldata)
+--~ if not xi then
+--~ epdt[e.ni] = ""
+--~ else
+--~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi)
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
--- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
--- escaped = Cs((S("<")/"&lt;" + S(">")/"&gt;" + S("&")/"&amp;" + 1)^0)
-local normal = (1 - S("<&>"))^0
-local special = P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;"
-local escaped = Cs(normal * (special * normal)^0)
+--~ xml.manipulate = manipulate
--- 100 * 1000 * "oeps&lt; oeps&gt; oeps&amp;" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
+function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space !
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ for i=1,#collected do
+ local e = collected[i]
+ local edt = e.dt
+ if edt then
+ local t = { }
+ for i=1,#edt do
+ local str = edt[i]
+ if type(str) == "string" then
+ if str == "" then
+ -- stripped
+ else
+ if nolines then
+ str = gsub(str,"[ \n\r\t]+"," ")
+ end
+ if str == "" then
+ -- stripped
+ else
+ t[#t+1] = str
+ end
+ end
+ else
+ --~ str.ni = i
+ t[#t+1] = str
+ end
+ end
+ e.dt = t
+ end
+ end
+ end
+end
--- unescaped = Cs((S("&lt;")/"<" + S("&gt;")/">" + S("&amp;")/"&" + 1)^0)
--- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0)
-local normal = (1 - S"&")^0
-local special = P("&lt;")/"<" + P("&gt;")/">" + P("&amp;")/"&"
-local unescaped = Cs(normal * (special * normal)^0)
+function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing
+ local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now
+ if collected then
+ for i=1,#collected do
+ local e = collected[i]
+ local edt = e.dt
+ if edt then
+ if anywhere then
+ local t = { }
+ for e=1,#edt do
+ local str = edt[e]
+ if type(str) ~= "string" then
+ t[#t+1] = str
+ elseif str ~= "" then
+ -- todo: lpeg for each case
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if str ~= "" then
+ t[#t+1] = str
+ end
+ end
+ end
+ e.dt = t
+ else
+ -- we can assume a regular sparse xml table with no successive strings
+ -- otherwise we should use a while loop
+ if #edt > 0 then
+ -- strip front
+ local str = edt[1]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt,1)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s+","")
+ if str == "" then
+ remove(edt,1)
+ else
+ edt[1] = str
+ end
+ end
+ end
+ if #edt > 1 then
+ -- strip end
+ local str = edt[#edt]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"%s+$","")
+ if str == "" then
+ remove(edt)
+ else
+ edt[#edt] = str
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
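+
+-- Note that this second definition overwrites the previous one, so the variant with
+-- the 'anywhere' argument is the one that is effectively used. Commented sketch (the
+-- pattern is a placeholder):
+
+--~ xml.strip_whitespace(root,"mycode",true)       -- strip edges, collapse lines
+--~ xml.strip_whitespace(root,"mycode",true,true)  -- strip every text fragment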
--- 100 * 5000 * "oeps <oeps bla='oeps' foo='bar'> oeps </oeps> oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
+local function rename_space(root, oldspace, newspace) -- fast variant
+ local ndt = #root.dt
+ for i=1,ndt or 0 do
+ local e = root[i]
+ if type(e) == "table" then
+ if e.ns == oldspace then
+ e.ns = newspace
+ if e.rn then
+ e.rn = newspace
+ end
+ end
+ local edt = e.dt
+ if edt then
+ rename_space(edt, oldspace, newspace)
+ end
+ end
+ end
+end
-local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
+xml.rename_space = rename_space
-xml.escaped_pattern = escaped
-xml.unescaped_pattern = unescaped
-xml.cleansed_pattern = cleansed
+function xml.remap_tag(root, pattern, newtg)
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].tg = newtg
+ end
+ end
+end
-function xml.escaped (str) return escaped :match(str) end
-function xml.unescaped(str) return unescaped:match(str) end
-function xml.cleansed (str) return cleansed :match(str) end
+function xml.remap_namespace(root, pattern, newns)
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].ns = newns
+ end
+ end
+end
-function xml.join(t,separator,lastseparator)
- if #t > 0 then
- local result = { }
- for k,v in pairs(t) do
- result[k] = xml.tostring(v)
+function xml.check_namespace(root, pattern, newns)
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ if (not e.rn or e.rn == "") and e.ns == "" then
+ e.rn = newns
+ end
end
- if lastseparator then
- return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. result[#result]
- else
- return concat(result,separator)
+ end
+end
+
+function xml.remap_name(root, pattern, newtg, newns, newrn)
+ local collected = xmlparseapply({ root },pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ e.tg, e.ns, e.rn = newtg, newns, newrn
end
- else
- return ""
end
end
+--[[ldx--
+<p>Here are a few synonyms.</p>
+--ldx]]--
+
+xml.each = xml.each_element
+xml.process = xml.process_element
+xml.strip = xml.strip_whitespace
+xml.collect = xml.collect_elements
+xml.all = xml.collect_elements
+
+xml.insert = xml.insert_element_after
+xml.inject = xml.inject_element_after
+xml.after = xml.insert_element_after
+xml.before = xml.insert_element_before
+xml.delete = xml.delete_element
+xml.replace = xml.replace_element
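+
+-- A commented sketch of the handler style that these synonyms keep supporting (root
+-- and the 'b' pattern are placeholders):
+
+--~ xml.each(root,"b",function(e) print(e.tg) end)
+--~ xml.delete(root,"b[@n='02']")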
+
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-tra'] = {
+if not modules then modules = { } end modules ['lxml-xml'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
-debugger = debugger or { }
+local function first(collected) -- wrong ?
+ return collected and collected[1]
+end
-local counters = { }
-local names = { }
-local getinfo = debug.getinfo
-local format, find, lower, gmatch = string.format, string.find, string.lower, string.gmatch
+local function last(collected)
+ return collected and collected[#collected]
+end
--- one
+local function all(collected)
+ return collected
+end
-local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
- if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
+local function reverse(collected)
+ if collected then
+ local reversed = { }
+ for c=#collected,1,-1 do
+ reversed[#reversed+1] = collected[c]
end
+ return reversed
end
end
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
- else
- -- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
- end
- else
- return "unknown"
+
+local function attribute(collected,name)
+ if collected and #collected > 0 then
+ local at = collected[1].at
+ return at and at[name]
end
end
-function debugger.showstats(printer,threshold)
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in pairs(counters) do
- if count > threshold then
- local name = getname(func)
- if not name:find("for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
- end
- grandtotal = grandtotal + count
- functions = functions + 1
- end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+
+local function att(id,name)
+ local at = id.at
+ return at and at[name]
end
--- two
+local function count(collected)
+ return (collected and #collected) or 0
+end
---~ local function hook()
---~ local n = getinfo(2)
---~ if n.what=="C" and not n.name then
---~ local f = tostring(debug.traceback())
---~ local cf = counters[f]
---~ if cf == nil then
---~ counters[f] = 1
---~ names[f] = n
---~ else
---~ counters[f] = cf + 1
---~ end
---~ end
---~ end
---~ function debugger.showstats(printer,threshold)
---~ printer = printer or texio.write or print
---~ threshold = threshold or 0
---~ local total, grandtotal, functions = 0, 0, 0
---~ printer("\n") -- ugly but ok
---~ -- table.sort(counters)
---~ for func, count in pairs(counters) do
---~ if count > threshold then
---~ printer(format("%8i %s", count, func))
---~ total = total + count
---~ end
---~ grandtotal = grandtotal + count
---~ functions = functions + 1
---~ end
---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
---~ end
+local function position(collected,n)
+ if collected then
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[#collected + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
+ end
+ end
+end
--- rest
+local function match(collected)
+ return (collected and collected[1].mi) or 0 -- match
+end
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
+local function index(collected)
+ if collected then
+ return collected[1].ni
end
end
-function debugger.enable()
- debug.sethook(hook,"c")
+local function attributes(collected,arguments)
+ if collected then
+ local at = collected[1].at
+ if arguments then
+ return at[arguments]
+ elseif next(at) then
+ return at -- all of them
+ end
+ end
end
-function debugger.disable()
- debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
+local function chainattribute(collected,arguments) -- todo: optional levels
+ if collected then
+ local e = collected[1]
+ while e do
+ local at = e.at
+ if at then
+ local a = at[arguments]
+ if a then
+ return a
+ end
+ else
+ break -- error
+ end
+ e = e.__p__
+ end
+ end
+ return ""
end
-function debugger.tracing()
- local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
- if n > 0 then
- function debugger.tracing() return true end ; return true
+local function raw(collected) -- hybrid
+ if collected then
+ local e = collected[1] or collected
+ return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
else
- function debugger.tracing() return false end ; return false
+ return ""
end
end
---~ debugger.enable()
-
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
-
---~ debugger.disable()
-
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-
-trackers = trackers or { }
+local function text(collected) -- hybrid
+ if collected then
+ local e = collected[1] or collected
+ return (e and xmltostring(e.dt)) or ""
+ else
+ return ""
+ end
+end
-local data, done = { }, { }
+local function texts(collected)
+ if collected then
+ local t = { }
+ for c=1,#collected do
+            local e = collected[c]
+ if e and e.dt then
+ t[#t+1] = e.dt
+ end
+ end
+ return t
+ end
+end
-local function set(what,value)
- if type(what) == "string" then
- what = aux.settings_to_array(what)
+local function tag(collected,n)
+ if collected then
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[#collected-n+1]
+ end
+ return c and c.tg
end
- for i=1,#what do
- local w = what[i]
- for d, f in next, data do
- if done[d] then
- -- prevent recursion due to wildcards
- elseif find(d,w) then
- done[d] = true
- for i=1,#f do
- f[i](value)
- end
+end
+
+local function name(collected,n)
+ if collected then
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[#collected-n+1]
+ end
+ if c then
+ if c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
end
end
-local function reset()
- for d, f in next, data do
- for i=1,#f do
- f[i](false)
+local function tags(collected,nonamespace)
+ if collected then
+ local t = { }
+ for c=1,#collected do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ if nonamespace or ns == "" then
+ t[#t+1] = tg
+ else
+ t[#t+1] = ns .. ":" .. tg
+ end
end
+ return t
end
end
-function trackers.register(what,...)
- what = lower(what)
- local w = data[what]
- if not w then
- w = { }
- data[what] = w
- end
- for _, fnc in next, { ... } do
- local typ = type(fnc)
- if typ == "function" then
- w[#w+1] = fnc
- elseif typ == "string" then
- w[#w+1] = function(value) set(fnc,value,nesting) end
+local function empty(collected)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then -- maybe an extra tester for spacing only
+ return false
+ end
+ elseif n > 1 then
+ return false
+ end
+ end
+ end
end
end
+ return true
end
-function trackers.enable(what)
- done = { }
- set(what,true)
+finalizers.first = first
+finalizers.last = last
+finalizers.all = all
+finalizers.reverse = reverse
+finalizers.elements = all
+finalizers.default = all
+finalizers.attribute = attribute
+finalizers.att = att
+finalizers.count = count
+finalizers.position = position
+finalizers.match = match
+finalizers.index = index
+finalizers.attributes = attributes
+finalizers.chainattribute = chainattribute
+finalizers.text = text
+finalizers.texts = texts
+finalizers.tag = tag
+finalizers.name = name
+finalizers.tags = tags
+finalizers.empty = empty
+
+-- shortcuts -- we could support xmlfilter(id,pattern,first)
+
+function xml.first(id,pattern)
+ return first(xmlfilter(id,pattern))
end
-function trackers.disable(what)
- done = { }
- if not what or what == "" then
- trackers.reset(what)
+function xml.last(id,pattern)
+ return last(xmlfilter(id,pattern))
+end
+
+function xml.count(id,pattern)
+ return count(xmlfilter(id,pattern))
+end
+
+function xml.attribute(id,pattern,a,default)
+ return attribute(xmlfilter(id,pattern),a,default)
+end
+
+function xml.raw(id,pattern)
+ if pattern then
+ return raw(xmlfilter(id,pattern))
else
- set(what,false)
+ return raw(id)
end
end
-function trackers.reset(what)
- done = { }
- reset()
+function xml.text(id,pattern)
+ if pattern then
+ -- return text(xmlfilter(id,pattern))
+ local collected = xmlfilter(id,pattern)
+ return (collected and xmltostring(collected[1].dt)) or ""
+ elseif id then
+ -- return text(id)
+ return xmltostring(id.dt) or ""
+ else
+ return ""
+ end
end
-function trackers.list() -- pattern
- local list = table.sortedkeys(data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
- system[#system+1] = what
- else
- user[#user+1] = what
- end
- end
- return user, system
+xml.content = text
+
+function xml.position(id,pattern,n) -- element
+ return position(xmlfilter(id,pattern),n)
+end
+
+function xml.match(id,pattern) -- number
+ return match(xmlfilter(id,pattern))
+end
+
+function xml.empty(id,pattern)
+ return empty(xmlfilter(id,pattern))
end
+xml.all = xml.filter
+xml.index = xml.position
+xml.found = xml.filter
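+
+-- Commented sketch of this shortcut layer (id being a placeholder for a parsed document):
+
+--~ print(xml.text(id,"title"))
+--~ print(xml.count(id,"b"))
+--~ print(xml.attribute(id,"b","n"))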
+
end -- of closure
@@ -5667,7 +7287,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['luat-env'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -5679,10 +7299,10 @@ if not modules then modules = { } end modules ['luat-env'] = {
-- evolved before bytecode arrays were available and so a lot of
-- code has disappeared already.
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end)
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end)
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local format = string.format
+local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
+local unquote, quote = string.unquote, string.quote
-- precautions
@@ -5716,13 +7336,14 @@ if not environment.jobname then environ
function environment.initialize_arguments(arg)
local arguments, files = { }, { }
environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index, argument in pairs(arg) do
+ for index=1,#arg do
+ local argument = arg[index]
if index > 0 then
- local flag, value = argument:match("^%-+(.+)=(.-)$")
+ local flag, value = match(argument,"^%-+(.-)=(.-)$")
if flag then
- arguments[flag] = string.unquote(value or "")
+ arguments[flag] = unquote(value or "")
else
- flag = argument:match("^%-+(.+)")
+ flag = match(argument,"^%-+(.+)")
if flag then
arguments[flag] = true
else
@@ -5749,25 +7370,30 @@ function environment.argument(name,partial)
return arguments[name]
elseif partial then
if not sortedflags then
- sortedflags = { }
- for _,v in pairs(table.sortedkeys(arguments)) do
- sortedflags[#sortedflags+1] = "^" .. v
+ sortedflags = table.sortedkeys(arguments)
+ for k=1,#sortedflags do
+ sortedflags[k] = "^" .. sortedflags[k]
end
environment.sortedflags = sortedflags
end
-- example of potential clash: ^mode ^modefile
- for _,v in ipairs(sortedflags) do
- if name:find(v) then
- return arguments[v:sub(2,#v)]
+ for k=1,#sortedflags do
+ local v = sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
end
end
end
return nil
end
+environment.argument("x",true)
+
function environment.split_arguments(separator) -- rather special, cut-off before separator
local done, before, after = false, { }, { }
- for _,v in ipairs(environment.original_arguments) do
+ local original_arguments = environment.original_arguments
+ for k=1,#original_arguments do
+ local v = original_arguments[k]
if not done and v == separator then
done = true
elseif done then
@@ -5784,16 +7410,17 @@ function environment.reconstruct_commandline(arg,noquote)
if noquote and #arg == 1 then
local a = arg[1]
a = resolvers.resolve(a)
- a = a:unquote()
+ a = unquote(a)
return a
- elseif next(arg) then
+ elseif #arg > 0 then
local result = { }
- for _,a in ipairs(arg) do -- ipairs 1 .. #n
+ for i=1,#arg do
+ local a = arg[i]
a = resolvers.resolve(a)
- a = a:unquote()
- a = a:gsub('"','\\"') -- tricky
- if a:find(" ") then
- result[#result+1] = a:quote()
+ a = unquote(a)
+ a = gsub(a,'"','\\"') -- tricky
+ if find(a," ") then
+ result[#result+1] = quote(a)
else
result[#result+1] = a
end
@@ -5806,17 +7433,18 @@ end
if arg then
- -- new, reconstruct quoted snippets (maybe better just remnove the " then and add them later)
+ -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
local newarg, instring = { }, false
- for index, argument in ipairs(arg) do
- if argument:find("^\"") then
- newarg[#newarg+1] = argument:gsub("^\"","")
- if not argument:find("\"$") then
+ for index=1,#arg do
+ local argument = arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1] = gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
instring = true
end
- elseif argument:find("\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument:gsub("\"$","")
+ elseif find(argument,"\"$") then
+ newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
instring = false
elseif instring then
newarg[#newarg] = newarg[#newarg] .. " " .. argument
@@ -5871,12 +7499,12 @@ function environment.luafilechunk(filename) -- used for loading lua bytecode in
filename = file.replacesuffix(filename, "lua")
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","loading file %s", fullname)
end
return environment.loadedluacode(fullname)
else
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","unknown file %s", filename)
end
return nil
@@ -5896,7 +7524,7 @@ function environment.loadluafile(filename, version)
-- when not overloaded by explicit suffix we look for a luc file first
local fullname = (lucname and environment.luafile(lucname)) or ""
if fullname ~= "" then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","loading %s", fullname)
end
chunk = loadfile(fullname) -- this way we don't need a file exists check
@@ -5914,7 +7542,7 @@ function environment.loadluafile(filename, version)
if v == version then
return true
else
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version)
end
environment.loadluafile(filename)
@@ -5925,12 +7553,12 @@ function environment.loadluafile(filename, version)
end
fullname = (luaname and environment.luafile(luaname)) or ""
if fullname ~= "" then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","loading %s", fullname)
end
chunk = loadfile(fullname) -- this way we don't need a file exists check
if not chunk then
- if verbose then
+ if trace_locating then
logs.report("fileio","unknown file %s", filename)
end
else
@@ -5948,7 +7576,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['trac-inf'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to trac-inf.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -5973,6 +7601,14 @@ function statistics.hastimer(instance)
return instance and instance.starttime
end
+function statistics.resettiming(instance)
+ if not instance then
+ notimer = { timing = 0, loadtime = 0 }
+ else
+ instance.timing, instance.loadtime = 0, 0
+ end
+end
+
function statistics.starttiming(instance)
if not instance then
notimer = { }
@@ -5987,6 +7623,8 @@ function statistics.starttiming(instance)
if not instance.loadtime then
instance.loadtime = 0
end
+ else
+--~ logs.report("system","nested timing (%s)",tostring(instance))
end
instance.timing = it + 1
end
@@ -6032,6 +7670,12 @@ function statistics.elapsedindeed(instance)
return t > statistics.threshold
end
+function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds
+ if statistics.elapsedindeed(instance) then
+ return format("%s seconds %s", statistics.elapsedtime(instance),rest or "")
+ end
+end
+
-- general function
function statistics.register(tag,fnc)
@@ -6110,14 +7754,32 @@ function statistics.timed(action,report)
report("total runtime: %s",statistics.elapsedtime(timer))
end
+-- where, not really the best spot for this:
+
+commands = commands or { }
+
+local timer
+
+function commands.resettimer()
+ statistics.resettiming(timer)
+ statistics.starttiming(timer)
+end
+
+function commands.elapsedtime()
+ statistics.stoptiming(timer)
+ tex.sprint(statistics.elapsedtime(timer))
+end
+
+commands.resettimer()
+
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-log'] = {
+if not modules then modules = { } end modules ['trac-log'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to trac-log.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -6125,7 +7787,11 @@ if not modules then modules = { } end modules ['luat-log'] = {
-- this is old code that needs an overhaul
-local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format
+--~ io.stdout:setvbuf("no")
+--~ io.stderr:setvbuf("no")
+
+local write_nl, write = texio.write_nl or print, texio.write or io.write
+local format, gmatch = string.format, string.gmatch
local texcount = tex and tex.count
if texlua then
@@ -6206,25 +7872,48 @@ function logs.tex.line(fmt,...) -- new
end
end
+--~ function logs.tex.start_page_number()
+--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+--~ if real > 0 then
+--~ if user > 0 then
+--~ if sub > 0 then
+--~ write(format("[%s.%s.%s",real,user,sub))
+--~ else
+--~ write(format("[%s.%s",real,user))
+--~ end
+--~ else
+--~ write(format("[%s",real))
+--~ end
+--~ else
+--~ write("[-")
+--~ end
+--~ end
+
+--~ function logs.tex.stop_page_number()
+--~ write("]")
+--~ end
+
+local real, user, sub
+
function logs.tex.start_page_number()
- local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+ real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+end
+
+function logs.tex.stop_page_number()
if real > 0 then
if user > 0 then
if sub > 0 then
- write(format("[%s.%s.%s",real,user,sub))
+ logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub)
else
- write(format("[%s.%s",real,user))
+ logs.report("pages", "flushing realpage %s, userpage %s",real,user)
end
else
- write(format("[%s",real))
+ logs.report("pages", "flushing realpage %s",real)
end
else
- write("[-")
+ logs.report("pages", "flushing page")
end
-end
-
-function logs.tex.stop_page_number()
- write("]")
+ io.flush()
end
logs.tex.report_job_stat = statistics.show_job_stat
@@ -6324,7 +8013,7 @@ end
function logs.setprogram(_name_,_banner_,_verbose_)
name, banner = _name_, _banner_
if _verbose_ then
- trackers.enable("resolvers.verbose")
+ trackers.enable("resolvers.locating")
end
logs.set_method("tex")
logs.report = report -- also used in libraries
@@ -6337,9 +8026,9 @@ end
function logs.setverbose(what)
if what then
- trackers.enable("resolvers.verbose")
+ trackers.enable("resolvers.locating")
else
- trackers.disable("resolvers.verbose")
+ trackers.disable("resolvers.locating")
end
logs.verbose = what or false
end
@@ -6356,7 +8045,7 @@ logs.report = logs.tex.report
logs.simple = logs.tex.report
function logs.reportlines(str) -- todo: <lines></lines>
- for line in str:gmatch("(.-)[\n\r]") do
+ for line in gmatch(str,"(.-)[\n\r]") do
logs.report(line)
end
end
@@ -6367,8 +8056,12 @@ end
logs.simpleline = logs.reportline
-function logs.help(message,option)
+function logs.reportbanner() -- for scripts too
logs.report(banner)
+end
+
+function logs.help(message,option)
+ logs.reportbanner()
logs.reportline()
logs.reportlines(message)
local moreinfo = logs.moreinfo or ""
@@ -6400,6 +8093,11 @@ end
--~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
--~ end
+function logs.fatal(where,...)
+ logs.report(where,"fatal error: %s, aborting now",format(...))
+ os.exit()
+end
+
end -- of closure
@@ -6407,10 +8105,10 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-inp'] = {
version = 1.001,
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
- comment = "companion to luat-lib.tex",
}
-- After a few years using the code the large luat-inp.lua file
@@ -6422,7 +8120,7 @@ if not modules then modules = { } end modules ['data-inp'] = {
-- * some public auxiliary functions were made private
--
-- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller)
+-- TODO: instances.[hashes,cnffiles,configurations,522]
-- TODO: check escaping in find etc, too much, too slow
-- This lib is multi-purpose and can be loaded again later on so that
@@ -6443,12 +8141,13 @@ if not modules then modules = { } end modules ['data-inp'] = {
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
+local lpegmatch = lpeg.match
-local trace_locating, trace_detail, trace_verbose = false, false, false
+local trace_locating, trace_detail, trace_expansions = false, false, false
-trackers.register("resolvers.verbose", function(v) trace_verbose = v end)
-trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end)
-trackers.register("resolvers.detail", function(v) trace_detail = v trackers.enable("resolvers.verbose,resolvers.detail") end)
+trackers.register("resolvers.locating", function(v) trace_locating = v end)
+trackers.register("resolvers.details", function(v) trace_detail = v end)
+trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
if not resolvers then
resolvers = {
@@ -6472,7 +8171,7 @@ resolvers.generators.notfound = { nil }
resolvers.cacheversion = '1.0.1'
resolvers.cnfname = 'texmf.cnf'
resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.platform == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
+resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
local dummy_path_expr = "^!*unset/*$"
@@ -6514,8 +8213,8 @@ suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
alternatives['map files'] = 'map'
alternatives['enc files'] = 'enc'
-alternatives['cid files'] = 'cid'
-alternatives['fea files'] = 'fea'
+alternatives['cid maps'] = 'cid' -- great, why no cid files
+alternatives['font feature files'] = 'fea' -- and fea files here
alternatives['opentype fonts'] = 'otf'
alternatives['truetype fonts'] = 'ttf'
alternatives['truetype collections'] = 'ttc'
@@ -6531,6 +8230,11 @@ formats ['sfd'] = 'SFDFONTS'
suffixes ['sfd'] = { 'sfd' }
alternatives['subfont definition files'] = 'sfd'
+-- lib paths
+
+formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
+suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
+
-- In practice we will work within one tds tree, but i want to keep
-- the option open to build tools that look at multiple trees, which is
-- why we keep the tree specific data in a table. We used to pass the
@@ -6653,8 +8357,10 @@ local function check_configuration() -- not yet ok, no time for debugging now
-- bad luck
end
fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm
- fix("FONTFEATURES", ".;$TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- fix("FONTCIDMAPS" , ".;$TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
+ -- this will go away some day
+ fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
+ fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
+ --
fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//")
end
@@ -6669,7 +8375,7 @@ function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
end
end
-resolvers.settrace(os.getenv("MTX.resolvers.TRACE") or os.getenv("MTX_INPUT_TRACE"))
+resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
function resolvers.osenv(key)
local ie = instance.environment
@@ -6757,37 +8463,43 @@ end
-- work that well; the parsing is ok, but dealing with the resulting
-- table is a pain because we need to work inside-out recursively
+local function do_first(a,b)
+ local t = { }
+ for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_second(a,b)
+ local t = { }
+ for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_both(a,b)
+ local t = { }
+ for sa in gmatch(a,"[^,]+") do
+ for sb in gmatch(b,"[^,]+") do
+ t[#t+1] = sa .. sb
+ end
+ end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_three(a,b,c)
+ return a .. b.. c
+end
+
local function splitpathexpr(str, t, validate)
-- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub; we could move
- -- the local functions outside the body
+ -- few times, we can use lpeg for the sub
+ if trace_expansions then
+ logs.report("fileio","expanding variable '%s'",str)
+ end
t = t or { }
str = gsub(str,",}",",@}")
str = gsub(str,"{,","{@,")
-- str = "@" .. str .. "@"
local ok, done
- local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
- end
- local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
- end
- local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
- end
- end
- return "{" .. concat(t,",") .. "}"
- end
- local function do_three(a,b,c)
- return a .. b.. c
- end
while true do
done = false
while true do
@@ -6818,6 +8530,11 @@ local function splitpathexpr(str, t, validate)
t[#t+1] = s
end
end
+ if trace_expansions then
+ for k=1,#t do
+ logs.report("fileio","% 4i: %s",k,t[k])
+ end
+ end
return t
end
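
The do_first/do_second/do_both helpers hoisted out of splitpathexpr above implement the brace expansion used in kpse-style path specifications. A small self-contained illustration of what the combination step produces (simplified; the real splitpathexpr also handles nesting, the @ placeholders and validation):

    local concat, gmatch = table.concat, string.gmatch

    local function do_first(a,b) -- "a{x,y}" -> "{ax,ay}"
        local t = { }
        for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
        return "{" .. concat(t,",") .. "}"
    end

    local function do_both(a,b) -- "{a,b}{c,d}" -> all combinations
        local t = { }
        for sa in gmatch(a,"[^,]+") do
            for sb in gmatch(b,"[^,]+") do
                t[#t+1] = sa .. sb
            end
        end
        return "{" .. concat(t,",") .. "}"
    end

    print(do_first("fonts/","otf,ttf"))
    -- {fonts/otf,fonts/ttf}
    print(do_both("tex,doc", "/context,/generic"))
    -- {tex/context,tex/generic,doc/context,doc/generic}
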
@@ -6857,18 +8574,27 @@ end
-- also we now follow the stupid route: if not set then just assume *one*
-- cnf file under texmf (i.e. distribution)
-resolvers.ownpath = resolvers.ownpath or nil
-resolvers.ownbin = resolvers.ownbin or arg[-2] or arg[-1] or arg[0] or "luatex"
-resolvers.autoselfdir = true -- false may be handy for debugging
+local args = environment and environment.original_arguments or arg -- this needs a cleanup
+
+resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
function resolvers.getownpath()
- if not resolvers.ownpath then
- if resolvers.autoselfdir and os.selfdir then
- resolvers.ownpath = os.selfdir
- else
- local binary = resolvers.ownbin
- if os.platform == "windows" then
- binary = file.replacesuffix(binary,"exe")
+ local ownpath = resolvers.ownpath or os.selfdir
+ if not ownpath or ownpath == "" or ownpath == "unset" then
+ ownpath = args[-1] or arg[-1]
+ ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath == "" then
+ ownpath = args[-0] or arg[-0]
+ ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
+ end
+ local binary = resolvers.ownbin
+ if not ownpath or ownpath == "" then
+ ownpath = ownpath and file.dirname(binary)
+ end
+ if not ownpath or ownpath == "" then
+ if os.binsuffix ~= "" then
+ binary = file.replacesuffix(binary,os.binsuffix)
end
for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
local b = file.join(p,binary)
@@ -6880,30 +8606,39 @@ function resolvers.getownpath()
local olddir = lfs.currentdir()
if lfs.chdir(p) then
local pp = lfs.currentdir()
- if trace_verbose and p ~= pp then
- logs.report("fileio","following symlink %s to %s",p,pp)
+ if trace_locating and p ~= pp then
+ logs.report("fileio","following symlink '%s' to '%s'",p,pp)
end
- resolvers.ownpath = pp
+ ownpath = pp
lfs.chdir(olddir)
else
- if trace_verbose then
- logs.report("fileio","unable to check path %s",p)
+ if trace_locating then
+ logs.report("fileio","unable to check path '%s'",p)
end
- resolvers.ownpath = p
+ ownpath = p
end
break
end
end
end
- if not resolvers.ownpath then resolvers.ownpath = '.' end
+ if not ownpath or ownpath == "" then
+ ownpath = "."
+ logs.report("fileio","forcing fallback ownpath .")
+ elseif trace_locating then
+ logs.report("fileio","using ownpath '%s'",ownpath)
+ end
+ end
+ resolvers.ownpath = ownpath
+ function resolvers.getownpath()
+ return resolvers.ownpath
end
- return resolvers.ownpath
+ return ownpath
end
local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
local function identify_own()
- local ownpath = resolvers.getownpath() or lfs.currentdir()
+ local ownpath = resolvers.getownpath() or dir.current()
local ie = instance.environment
if ownpath then
if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
@@ -6916,10 +8651,10 @@ local function identify_own()
if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
- if trace_verbose then
+ if trace_locating then
for i=1,#own_places do
local v = own_places[i]
- logs.report("fileio","variable %s set to %s",v,resolvers.env(v) or "unknown")
+ logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
end
end
identify_own = function() end
@@ -6951,10 +8686,8 @@ end
local function load_cnf_file(fname)
fname = resolvers.clean_path(fname)
local lname = file.replacesuffix(fname,'lua')
- local f = io.open(lname)
- if f then -- this will go
- f:close()
- local dname = file.dirname(fname)
+ if lfs.isfile(lname) then
+ local dname = file.dirname(fname) -- fname ?
if not instance.configuration[dname] then
resolvers.load_data(dname,'configuration',lname and file.basename(lname))
instance.order[#instance.order+1] = instance.configuration[dname]
@@ -6962,8 +8695,8 @@ local function load_cnf_file(fname)
else
f = io.open(fname)
if f then
- if trace_verbose then
- logs.report("fileio","loading %s", fname)
+ if trace_locating then
+ logs.report("fileio","loading configuration file %s", fname)
end
local line, data, n, k, v
local dname = file.dirname(fname)
@@ -6997,14 +8730,16 @@ local function load_cnf_file(fname)
end
end
f:close()
- elseif trace_verbose then
- logs.report("fileio","skipping %s", fname)
+ elseif trace_locating then
+ logs.report("fileio","skipping configuration file '%s'", fname)
end
end
end
local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- for _,c in ipairs(instance.order) do
+ local order = instance.order
+ for i=1,#order do
+ local c = order[i]
for k,v in next, c do
if not instance.variables[k] then
if instance.environment[k] then
@@ -7020,19 +8755,24 @@ end
function resolvers.load_cnf()
local function loadoldconfigdata()
- for _, fname in ipairs(instance.cnffiles) do
- load_cnf_file(fname)
+ local cnffiles = instance.cnffiles
+ for i=1,#cnffiles do
+ load_cnf_file(cnffiles[i])
end
end
-- instance.cnffiles contain complete names now !
+ -- we still use a funny mix of cnf and new but soon
+ -- we will switch to lua exclusively as we only use
+ -- the file to collect the tree roots
if #instance.cnffiles == 0 then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
end
else
- instance.rootpath = instance.cnffiles[1]
- for k,fname in ipairs(instance.cnffiles) do
- instance.cnffiles[k] = file.collapse_path(gsub(fname,"\\",'/'))
+ local cnffiles = instance.cnffiles
+ instance.rootpath = cnffiles[1]
+ for k=1,#cnffiles do
+ instance.cnffiles[k] = file.collapse_path(cnffiles[k])
end
for i=1,3 do
instance.rootpath = file.dirname(instance.rootpath)
@@ -7060,8 +8800,9 @@ function resolvers.load_lua()
-- yet harmless
else
instance.rootpath = instance.luafiles[1]
- for k,fname in ipairs(instance.luafiles) do
- instance.luafiles[k] = file.collapse_path(gsub(fname,"\\",'/'))
+ local luafiles = instance.luafiles
+ for k=1,#luafiles do
+ instance.luafiles[k] = file.collapse_path(luafiles[k])
end
for i=1,3 do
instance.rootpath = file.dirname(instance.rootpath)
@@ -7093,14 +8834,14 @@ end
function resolvers.append_hash(type,tag,name)
if trace_locating then
- logs.report("fileio","= hash append: %s",tag)
+ logs.report("fileio","hash '%s' appended",tag)
end
insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
end
function resolvers.prepend_hash(type,tag,name)
if trace_locating then
- logs.report("fileio","= hash prepend: %s",tag)
+ logs.report("fileio","hash '%s' prepended",tag)
end
insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
end
@@ -7124,9 +8865,11 @@ end
-- locators
function resolvers.locatelists()
- for _, path in ipairs(resolvers.clean_path_list('TEXMF')) do
- if trace_verbose then
- logs.report("fileio","locating list of %s",path)
+ local texmfpaths = resolvers.clean_path_list('TEXMF')
+ for i=1,#texmfpaths do
+ local path = texmfpaths[i]
+ if trace_locating then
+ logs.report("fileio","locating list of '%s'",path)
end
resolvers.locatedatabase(file.collapse_path(path))
end
@@ -7139,11 +8882,11 @@ end
function resolvers.locators.tex(specification)
if specification and specification ~= '' and lfs.isdir(specification) then
if trace_locating then
- logs.report("fileio",'! tex locator found: %s',specification)
+ logs.report("fileio","tex locator '%s' found",specification)
end
resolvers.append_hash('file',specification,filename)
elseif trace_locating then
- logs.report("fileio",'? tex locator not found: %s',specification)
+ logs.report("fileio","tex locator '%s' not found",specification)
end
end
@@ -7157,7 +8900,9 @@ function resolvers.loadfiles()
instance.loaderror = false
instance.files = { }
if not instance.renewcache then
- for _, hash in ipairs(instance.hashes) do
+ local hashes = instance.hashes
+ for k=1,#hashes do
+ local hash = hashes[k]
resolvers.hashdatabase(hash.tag,hash.name)
if instance.loaderror then break end
end
@@ -7171,8 +8916,9 @@ end
-- generators:
function resolvers.loadlists()
- for _, hash in ipairs(instance.hashes) do
- resolvers.generatedatabase(hash.tag)
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ resolvers.generatedatabase(hashes[i].tag)
end
end
@@ -7184,10 +8930,27 @@ end
local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
+--~ local l_confusing = lpeg.P(" ")
+--~ local l_character = lpeg.patterns.utf8
+--~ local l_dangerous = lpeg.P(".")
+
+--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
+--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
+
+--~ local function test(str)
+--~ print(str,lpeg.match(l_normal,str))
+--~ end
+--~ test("ヒラギノ明朝 Pro W3")
+--~ test("..ヒラギノ明朝 Pro W3")
+--~ test(":ヒラギノ明朝 Pro W3;")
+--~ test("ヒラギノ明朝 /Pro W3;")
+--~ test("ヒラギノ明朝 Pro W3")
+
function resolvers.generators.tex(specification)
local tag = specification
- if trace_verbose then
- logs.report("fileio","scanning path %s",specification)
+ if trace_locating then
+ logs.report("fileio","scanning path '%s'",specification)
end
instance.files[tag] = { }
local files = instance.files[tag]
@@ -7203,7 +8966,8 @@ function resolvers.generators.tex(specification)
full = spec
end
for name in directory(full) do
- if not weird:match(name) then
+ if not lpegmatch(weird,name) then
+ -- if lpegmatch(l_normal,name) then
local mode = attributes(full..name,'mode')
if mode == 'file' then
if path then
@@ -7236,7 +9000,7 @@ function resolvers.generators.tex(specification)
end
end
action()
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
end
end
@@ -7251,11 +9015,48 @@ end
-- we join them and split them after the expansion has taken place. This
-- is more convenient.
+--~ local checkedsplit = string.checkedsplit
+
+local cache = { }
+
+local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
+
+local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
+ local found = cache[str]
+ if not found then
+ if str == "" then
+ found = { }
+ else
+ str = gsub(str,"\\","/")
+--~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
+local split = lpegmatch(splitter,str)
+ found = { }
+ for i=1,#split do
+ local s = split[i]
+ if not find(s,"^{*unset}*") then
+ found[#found+1] = s
+ end
+ end
+ if trace_expansions then
+ logs.report("fileio","splitting path specification '%s'",str)
+ for k=1,#found do
+ logs.report("fileio","% 4i: %s",k,found[k])
+ end
+ end
+ cache[str] = found
+ end
+ end
+ return found
+end
+
+resolvers.split_kpse_path = split_kpse_path
+
function resolvers.splitconfig()
- for i,c in ipairs(instance) do
- for k,v in pairs(c) do
+ for i=1,#instance do
+ local c = instance[i]
+ for k,v in next, c do
if type(v) == 'string' then
- local t = file.split_path(v)
+ local t = split_kpse_path(v)
if #t > 1 then
c[k] = t
end
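
split_kpse_path above splits a kpse path specification on the platform separators, caches the result per input string and drops placeholder "unset" entries. A rough plain-Lua equivalent for illustration only (the real code uses an lpeg splitter keyed on os.type and is shared via resolvers.split_kpse_path; the separators argument here is an assumption):

    local gsub, gmatch, find = string.gsub, string.gmatch, string.find

    local cache = { }

    local function split_kpse_path(str, separators) -- e.g. separators = ";" or ";:"
        local found = cache[str]
        if not found then
            found = { }
            str = gsub(str, "\\", "/")
            for s in gmatch(str, "[^" .. (separators or ";") .. "]+") do
                if not find(s, "^{*unset}*") then -- skip placeholder entries
                    found[#found+1] = s
                end
            end
            cache[str] = found
        end
        return found
    end

    local t = split_kpse_path(".;/opt/texmf;{$TEXMF}/tex//;unset")
    for i=1,#t do print(i, t[i]) end
    -- 1  .
    -- 2  /opt/texmf
    -- 3  {$TEXMF}/tex//
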
@@ -7265,21 +9066,25 @@ function resolvers.splitconfig()
end
function resolvers.joinconfig()
- for i,c in ipairs(instance.order) do
- for k,v in pairs(c) do -- ipairs?
+ local order = instance.order
+ for i=1,#order do
+ local c = order[i]
+ for k,v in next, c do -- indexed?
if type(v) == 'table' then
c[k] = file.join_path(v)
end
end
end
end
+
function resolvers.split_path(str)
if type(str) == 'table' then
return str
else
- return file.split_path(str)
+ return split_kpse_path(str)
end
end
+
function resolvers.join_path(str)
if type(str) == 'table' then
return file.join_path(str)
@@ -7291,8 +9096,9 @@ end
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h = { }, { }
- for _,vv in ipairs(file.split_path(v)) do
+ local t, h, p = { }, { }, split_kpse_path(v)
+ for kk=1,#p do
+ local vv = p[kk]
if vv ~= "" and not h[vv] then
t[#t+1] = vv
h[vv] = true
@@ -7339,11 +9145,15 @@ function resolvers.serialize(files)
end
t[#t+1] = "return {"
if instance.sortdata then
- for _, k in pairs(sortedkeys(files)) do -- ipairs
+ local sortedfiles = sortedkeys(files)
+ for i=1,#sortedfiles do
+ local k = sortedfiles[i]
local fk = files[k]
if type(fk) == 'table' then
t[#t+1] = "\t['" .. k .. "']={"
- for _, kk in pairs(sortedkeys(fk)) do -- ipairs
+ local sortedfk = sortedkeys(fk)
+ for j=1,#sortedfk do
+ local kk = sortedfk[j]
t[#t+1] = dump(kk,fk[kk],"\t\t")
end
t[#t+1] = "\t},"
@@ -7368,12 +9178,18 @@ function resolvers.serialize(files)
return concat(t,"\n")
end
+local data_state = { }
+
+function resolvers.data_state()
+ return data_state or { }
+end
+
function resolvers.save_data(dataname, makename) -- untested without cache overload
for cachename, files in next, instance[dataname] do
local name = (makename or file.join)(cachename,dataname)
local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_verbose then
- logs.report("fileio","preparing %s for %s",dataname,cachename)
+ if trace_locating then
+ logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
end
for k, v in next, files do
if type(v) == "table" and #v == 1 then
@@ -7387,24 +9203,25 @@ function resolvers.save_data(dataname, makename) -- untested without cache overl
date = os.date("%Y-%m-%d"),
time = os.date("%H:%M:%S"),
content = files,
+ uuid = os.uuid(),
}
local ok = io.savedata(luaname,resolvers.serialize(data))
if ok then
- if trace_verbose then
- logs.report("fileio","%s saved in %s",dataname,luaname)
+ if trace_locating then
+ logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
end
if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_verbose then
- logs.report("fileio","%s compiled to %s",dataname,lucname)
+ if trace_locating then
+ logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
end
else
- if trace_verbose then
- logs.report("fileio","compiling failed for %s, deleting file %s",dataname,lucname)
+ if trace_locating then
+ logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
end
os.remove(lucname)
end
- elseif trace_verbose then
- logs.report("fileio","unable to save %s in %s (access error)",dataname,luaname)
+ elseif trace_locating then
+ logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
end
end
end
@@ -7416,19 +9233,20 @@ function resolvers.load_data(pathname,dataname,filename,makename) -- untested wi
if blob then
local data = blob()
if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- if trace_verbose then
- logs.report("fileio","loading %s for %s from %s",dataname,pathname,filename)
+ data_state[#data_state+1] = data.uuid
+ if trace_locating then
+ logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
end
instance[dataname][pathname] = data.content
else
- if trace_verbose then
- logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename)
+ if trace_locating then
+ logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
end
instance[dataname][pathname] = { }
instance.loaderror = true
end
- elseif trace_verbose then
- logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename)
+ elseif trace_locating then
+ logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
end
end
@@ -7447,15 +9265,17 @@ function resolvers.resetconfig()
end
function resolvers.loadnewconfig()
- for _, cnf in ipairs(instance.luafiles) do
+ local luafiles = instance.luafiles
+ for i=1,#luafiles do
+ local cnf = luafiles[i]
local pathname = file.dirname(cnf)
local filename = file.join(pathname,resolvers.luaname)
local blob = loadfile(filename)
if blob then
local data = blob()
if data then
- if trace_verbose then
- logs.report("fileio","loading configuration file %s",filename)
+ if trace_locating then
+ logs.report("fileio","loading configuration file '%s'",filename)
end
if true then
-- flatten to variable.progname
@@ -7476,14 +9296,14 @@ function resolvers.loadnewconfig()
instance['setup'][pathname] = data
end
else
- if trace_verbose then
- logs.report("fileio","skipping configuration file %s",filename)
+ if trace_locating then
+ logs.report("fileio","skipping configuration file '%s'",filename)
end
instance['setup'][pathname] = { }
instance.loaderror = true
end
- elseif trace_verbose then
- logs.report("fileio","skipping configuration file %s",filename)
+ elseif trace_locating then
+ logs.report("fileio","skipping configuration file '%s'",filename)
end
instance.order[#instance.order+1] = instance.setup[pathname]
if instance.loaderror then break end
@@ -7492,7 +9312,9 @@ end
function resolvers.loadoldconfig()
if not instance.renewcache then
- for _, cnf in ipairs(instance.cnffiles) do
+ local cnffiles = instance.cnffiles
+ for i=1,#cnffiles do
+ local cnf = cnffiles[i]
local dname = file.dirname(cnf)
resolvers.load_data(dname,'configuration')
instance.order[#instance.order+1] = instance.configuration[dname]
@@ -7682,7 +9504,7 @@ end
function resolvers.expanded_path_list(str)
if not str then
- return ep or { }
+ return ep or { } -- ep ?
elseif instance.savelists then
-- engine+progname hash
str = gsub(str,"%$","")
@@ -7700,9 +9522,9 @@ end
function resolvers.expanded_path_list_from_var(str) -- brrr
local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$",""))
if tmp ~= "" then
- return resolvers.expanded_path_list(str)
- else
return resolvers.expanded_path_list(tmp)
+ else
+ return resolvers.expanded_path_list(str)
end
end
@@ -7749,9 +9571,9 @@ function resolvers.isreadable.file(name)
local readable = lfs.isfile(name) -- brrr
if trace_detail then
if readable then
- logs.report("fileio","+ readable: %s",name)
+ logs.report("fileio","file '%s' is readable",name)
else
- logs.report("fileio","- readable: %s", name)
+ logs.report("fileio","file '%s' is not readable", name)
end
end
return readable
@@ -7767,7 +9589,7 @@ local function collect_files(names)
for k=1,#names do
local fname = names[k]
if trace_detail then
- logs.report("fileio","? blobpath asked: %s",fname)
+ logs.report("fileio","checking name '%s'",fname)
end
local bname = file.basename(fname)
local dname = file.dirname(fname)
@@ -7783,7 +9605,7 @@ local function collect_files(names)
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- logs.report("fileio",'? blobpath do: %s (%s)',blobpath,bname)
+ logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
end
local blobfile = files[bname]
if not blobfile then
@@ -7817,7 +9639,7 @@ local function collect_files(names)
end
end
elseif trace_locating then
- logs.report("fileio",'! blobpath no: %s (%s)',blobpath,bname)
+ logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
end
end
end
@@ -7867,14 +9689,13 @@ end
local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
local result = collected or { }
local stamp = nil
- filename = file.collapse_path(filename) -- elsewhere
- filename = file.collapse_path(gsub(filename,"\\","/")) -- elsewhere
+ filename = file.collapse_path(filename)
-- speed up / beware: format problem
if instance.remember then
stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
if instance.found[stamp] then
if trace_locating then
- logs.report("fileio",'! remembered: %s',filename)
+ logs.report("fileio","remembering file '%s'",filename)
end
return instance.found[stamp]
end
@@ -7882,7 +9703,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not dangerous[instance.format or "?"] then
if resolvers.isreadable.file(filename) then
if trace_detail then
- logs.report("fileio",'= found directly: %s',filename)
+ logs.report("fileio","file '%s' found directly",filename)
end
instance.found[stamp] = { filename }
return { filename }
@@ -7890,13 +9711,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
if find(filename,'%*') then
if trace_locating then
- logs.report("fileio",'! wildcard: %s', filename)
+ logs.report("fileio","checking wildcard '%s'", filename)
end
result = resolvers.find_wildcard_files(filename)
elseif file.is_qualified_path(filename) then
if resolvers.isreadable.file(filename) then
if trace_locating then
- logs.report("fileio",'! qualified: %s', filename)
+ logs.report("fileio","qualified name '%s'", filename)
end
result = { filename }
else
@@ -7906,7 +9727,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
forcedname = filename .. ".tex"
if resolvers.isreadable.file(forcedname) then
if trace_locating then
- logs.report("fileio",'! no suffix, forcing standard filetype: tex')
+ logs.report("fileio","no suffix, forcing standard filetype 'tex'")
end
result, ok = { forcedname }, true
end
@@ -7916,7 +9737,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
forcedname = filename .. "." .. s
if resolvers.isreadable.file(forcedname) then
if trace_locating then
- logs.report("fileio",'! no suffix, forcing format filetype: %s', s)
+ logs.report("fileio","no suffix, forcing format filetype '%s'", s)
end
result, ok = { forcedname }, true
break
@@ -7928,7 +9749,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
local basename = file.basename(filename)
- local pattern = (filename .. "$"):gsub("([%.%-])","%%%1")
+ local pattern = gsub(filename .. "$","([%.%-])","%%%1")
local savedformat = instance.format
local format = savedformat or ""
if format == "" then
@@ -7938,19 +9759,21 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
instance.format = "othertextfiles" -- kind of everything, maybe texinput is better
end
--
- local resolved = collect_instance_files(basename)
- if #result == 0 then
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered)
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename)
+ if #result == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered)
+ end
end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if rr:find(pattern) then
- result[#result+1], ok = rr, true
+ resolvers.format = savedformat
+ --
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1], ok = rr, true
+ end
end
end
-- a real wildcard:
@@ -7959,14 +9782,14 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- local filelist = collect_files({basename})
-- for f=1,#filelist do
-- local ff = filelist[f][3] or ""
- -- if ff:find(pattern) then
+ -- if find(ff,pattern) then
-- result[#result+1], ok = ff, true
-- end
-- end
-- end
end
if not ok and trace_locating then
- logs.report("fileio",'? qualified: %s', filename)
+ logs.report("fileio","qualified name '%s'", filename)
end
end
else
@@ -7985,12 +9808,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
wantedfiles[#wantedfiles+1] = forcedname
filetype = resolvers.format_of_suffix(forcedname)
if trace_locating then
- logs.report("fileio",'! forcing filetype: %s',filetype)
+ logs.report("fileio","forcing filetype '%s'",filetype)
end
else
filetype = resolvers.format_of_suffix(filename)
if trace_locating then
- logs.report("fileio",'! using suffix based filetype: %s',filetype)
+ logs.report("fileio","using suffix based filetype '%s'",filetype)
end
end
else
@@ -8002,7 +9825,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
filetype = instance.format
if trace_locating then
- logs.report("fileio",'! using given filetype: %s',filetype)
+ logs.report("fileio","using given filetype '%s'",filetype)
end
end
local typespec = resolvers.variable_of_format(filetype)
@@ -8010,9 +9833,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
- logs.report("fileio",'? filename: %s',filename)
- logs.report("fileio",'? filetype: %s',filetype or '?')
- logs.report("fileio",'? wanted files: %s',concat(wantedfiles," | "))
+ logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
@@ -8033,36 +9854,59 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
else
-- list search
local filelist = collect_files(wantedfiles)
- local doscan, recurse
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = file.dirname(filelist[i][2]) .. "/"
+ end
+ end
if trace_detail then
- logs.report("fileio",'? filename: %s',filename)
+ logs.report("fileio","checking filename '%s'",filename)
end
-- a bit messy ... esp the doscan setting here
+ local doscan
for k=1,#pathlist do
local path = pathlist[k]
if find(path,"^!!") then doscan = false else doscan = true end
- if find(path,"//$") then recurse = true else recurse = false end
local pathname = gsub(path,"^!+", '')
done = false
-- using file list
- if filelist and not (done and not instance.allresults) and recurse then
- -- compare list entries with permitted pattern
- pathname = gsub(pathname,"([%-%.])","%%%1") -- this also influences
- pathname = gsub(pathname,"/+$", '/.*') -- later usage of pathname
- pathname = gsub(pathname,"//", '/.-/') -- not ok for /// but harmless
- local expr = "^" .. pathname
+ if filelist then
+ local expression
+ -- compare list entries with permitted pattern -- /xx /xx//
+ if not find(pathname,"/$") then
+ expression = pathname .. "/"
+ else
+ expression = pathname
+ end
+ expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
+ expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
+ expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
+ expression = "^" .. expression .. "$"
+ if trace_detail then
+ logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
+ end
for k=1,#filelist do
local fl = filelist[k]
local f = fl[2]
- if find(f,expr) then
- if trace_detail then
- logs.report("fileio",'= found in hash: %s',f)
- end
+ local d = dirlist[k]
+ if find(d,expression) then
--- todo, test for readable
result[#result+1] = fl[3]
resolvers.register_in_trees(f) -- for tracing used files
done = true
- if not instance.allresults then break end
+ if instance.allresults then
+ if trace_detail then
+ logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
+ end
+ else
+ if trace_detail then
+ logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
+ end
+ break
+ end
+ elseif trace_detail then
+ logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
end
end
end
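
The rewritten list search above compares directory names from the file database against a Lua pattern derived from each path in the path list: a trailing "//" (recursive search) becomes "/.*", an interior "//" becomes "/.-/", and "." and "-" are escaped. A small sketch of just that translation step (assumed input; the real code also strips "!!" prefixes and walks the hash):

    local gsub, find = string.gsub, string.find

    local function topattern(pathname)
        local expression = pathname
        if not find(expression, "/$") then
            expression = expression .. "/"
        end
        expression = gsub(expression, "([%-%.])", "%%%1") -- escape - and .
        expression = gsub(expression, "//+$", "/.*")      -- trailing // : any subtree
        expression = gsub(expression, "//", "/.-/")       -- inner // : any intermediate dirs
        return "^" .. expression .. "$"
    end

    local pattern = topattern("/texmf/fonts//")
    print(pattern)                                                 -- ^/texmf/fonts/.*$
    print(("/texmf/fonts/opentype/public/"):find(pattern) ~= nil)  -- true
    print(("/texmf/tex/context/"):find(pattern) ~= nil)            -- false
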
@@ -8078,7 +9922,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local fname = file.join(ppname,w)
if resolvers.isreadable.file(fname) then
if trace_detail then
- logs.report("fileio",'= found by scanning: %s',fname)
+ logs.report("fileio","found '%s' by scanning",fname)
end
result[#result+1] = fname
done = true
@@ -8141,7 +9985,7 @@ function resolvers.find_given_files(filename)
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local files = instance.files[hash.tag]
+ local files = instance.files[hash.tag] or { }
local blist = files[bname]
if not blist then
local rname = "remap:"..bname
@@ -8251,9 +10095,9 @@ function resolvers.load(option)
statistics.starttiming(instance)
resolvers.resetconfig()
resolvers.identify_cnf()
- resolvers.load_lua()
+ resolvers.load_lua() -- will become the new method
resolvers.expand_variables()
- resolvers.load_cnf()
+ resolvers.load_cnf() -- will be skipped when we have a lua file
resolvers.expand_variables()
if option ~= "nofiles" then
resolvers.load_hash()
@@ -8265,22 +10109,23 @@ end
function resolvers.for_files(command, files, filetype, mustexist)
if files and #files > 0 then
local function report(str)
- if trace_verbose then
+ if trace_locating then
logs.report("fileio",str) -- has already verbose
else
print(str)
end
end
- if trace_verbose then
- report('')
+ if trace_locating then
+ report('') -- ?
end
- for _, file in ipairs(files) do
+ for f=1,#files do
+ local file = files[f]
local result = command(file,filetype,mustexist)
if type(result) == 'string' then
report(result)
else
- for _,v in ipairs(result) do
- report(v)
+ for i=1,#result do
+ report(result[i]) -- could be unpack
end
end
end
@@ -8327,18 +10172,19 @@ end
function table.sequenced(t,sep) -- temp here
local s = { }
- for k, v in pairs(t) do -- pairs?
- s[#s+1] = k .. "=" .. v
+ for k, v in next, t do -- indexed?
+ s[#s+1] = k .. "=" .. tostring(v)
end
return concat(s, sep or " | ")
end
function resolvers.methodhandler(what, filename, filetype) -- ...
+ filename = file.collapse_path(filename)
local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
local scheme = specification.scheme
if resolvers[what][scheme] then
if trace_locating then
- logs.report("fileio",'= handler: %s -> %s -> %s',specification.original,what,table.sequenced(specification))
+ logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
end
return resolvers[what][scheme](filename,filetype) -- todo: specification
else
@@ -8358,8 +10204,9 @@ function resolvers.clean_path(str)
end
function resolvers.do_with_path(name,func)
- for _, v in pairs(resolvers.expanded_path_list(name)) do -- pairs?
- func("^"..resolvers.clean_path(v))
+ local pathlist = resolvers.expanded_path_list(name)
+ for i=1,#pathlist do
+ func("^"..resolvers.clean_path(pathlist[i]))
end
end
@@ -8368,7 +10215,9 @@ function resolvers.do_with_var(name,func)
end
function resolvers.with_files(pattern,handle)
- for _, hash in ipairs(instance.hashes) do
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ local hash = hashes[i]
local blobpath = hash.tag
local blobtype = hash.type
if blobpath then
@@ -8383,7 +10232,7 @@ function resolvers.with_files(pattern,handle)
if type(v) == "string" then
handle(blobtype,blobpath,v,k)
else
- for _,vv in pairs(v) do -- ipairs?
+ for _,vv in next, v do -- indexed
handle(blobtype,blobpath,vv,k)
end
end
@@ -8395,7 +10244,7 @@ function resolvers.with_files(pattern,handle)
end
function resolvers.locate_format(name)
- local barename, fmtname = name:gsub("%.%a+$",""), ""
+ local barename, fmtname = gsub(name,"%.%a+$",""), ""
if resolvers.usecache then
local path = file.join(caches.setpath("formats")) -- maybe platform
fmtname = file.join(path,barename..".fmt") or ""
@@ -8443,7 +10292,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-tmp'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -8467,7 +10316,7 @@ luatools with a recache feature.</p>
local format, lower, gsub = string.format, string.lower, string.gsub
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
+local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet
caches = caches or { }
@@ -8554,7 +10403,8 @@ function caches.setpath(...)
caches.path = '.'
end
caches.path = resolvers.clean_path(caches.path)
- if not table.is_empty({...}) then
+ local dirs = { ... }
+ if #dirs > 0 then
local pth = dir.mkdirs(caches.path,...)
return pth
end
@@ -8600,6 +10450,7 @@ function caches.savedata(filepath,filename,data,raw)
if raw then
reduce, simplify = false, false
end
+ data.cache_uuid = os.uuid()
if caches.direct then
file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
else
@@ -8625,7 +10476,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-res'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -8660,6 +10511,14 @@ prefixes.relative = function(str,n)
return resolvers.clean_path(str)
end
+prefixes.auto = function(str)
+ local fullname = prefixes.relative(str)
+ if not lfs.isfile(fullname) then
+ fullname = prefixes.locate(str)
+ end
+ return fullname
+end
+
prefixes.locate = function(str)
local fullname = resolvers.find_given_file(str) or ""
return resolvers.clean_path((fullname ~= "" and fullname) or str)
@@ -8683,6 +10542,16 @@ prefixes.full = prefixes.locate
prefixes.file = prefixes.filename
prefixes.path = prefixes.pathname
+function resolvers.allprefixes(separator)
+ local all = table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i] = all[i] .. ":"
+ end
+ end
+ return all
+end
+
local function _resolve_(method,target)
if prefixes[method] then
return prefixes[method](target)
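
prefixes.auto added above first resolves relative to the current directory and falls back to a full resolver lookup when the file is not there, while resolvers.allprefixes simply lists the registered prefix names. A toy version of the prefix dispatch with invented placeholder prefix functions (not the real resolvers):

    local prefixes = { }

    prefixes.relative = function(str) return "./" .. str end            -- placeholder
    prefixes.locate   = function(str) return "/texmf/found/" .. str end -- placeholder

    prefixes.auto = function(str)
        local fullname = prefixes.relative(str)
        local f = io.open(fullname, "r") -- stand-in for lfs.isfile
        if f then
            f:close()
            return fullname
        end
        return prefixes.locate(str)
    end

    local function resolve(str) -- "method:target" -> prefixes.method(target)
        local method, target = str:match("^(%a+):(.*)$")
        local handler = method and prefixes[method]
        return handler and handler(target) or str
    end

    print(resolve("auto:oeps.tex"))     -- /texmf/found/oeps.tex (assuming ./oeps.tex is absent)
    print(resolve("relative:oeps.tex")) -- ./oeps.tex

    -- allprefixes, as added above, returns the sorted prefix names
    local all = { }
    for k in pairs(prefixes) do all[#all+1] = k .. ":" end
    table.sort(all)
    print(table.concat(all, " "))       -- auto: locate: relative:
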
@@ -8693,7 +10562,8 @@ end
local function resolve(str)
if type(str) == "table" then
- for k, v in pairs(str) do -- ipairs
+ for k=1,#str do
+ local v = str[k]
str[k] = resolve(v) or v
end
elseif str and str ~= "" then
@@ -8706,7 +10576,7 @@ resolvers.resolve = resolve
if os.uname then
- for k, v in pairs(os.uname()) do
+ for k, v in next, os.uname() do
if not prefixes[k] then
prefixes[k] = function() return v end
end
@@ -8721,7 +10591,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-inp'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -8742,7 +10612,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-out'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -8758,7 +10628,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-con'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -8769,8 +10639,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub
local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end)
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end)
--[[ldx--
<p>Once we found ourselves defining similar cache constructs
@@ -8834,7 +10702,7 @@ end
function containers.is_valid(container, name)
if name and name ~= "" then
local storage = container.storage[name]
- return storage and not table.is_empty(storage) and storage.cache_version == container.version
+ return storage and storage.cache_version == container.version
else
return false
end
@@ -8886,16 +10754,15 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-use'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local format, lower, gsub = string.format, string.lower, string.gsub
+local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end)
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end)
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-- since we want to use the cache instead of the tree, we will now
-- reimplement the saver.
@@ -8939,19 +10806,20 @@ resolvers.automounted = resolvers.automounted or { }
function resolvers.automount(usecache)
local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
- if table.is_empty(mountpaths) and usecache then
+ if (not mountpaths or #mountpaths == 0) and usecache then
mountpaths = { caches.setpath("mount") }
end
- if not table.is_empty(mountpaths) then
+ if mountpaths and #mountpaths > 0 then
statistics.starttiming(resolvers.instance)
- for k, root in pairs(mountpaths) do
+ for k=1,#mountpaths do
+ local root = mountpaths[k]
local f = io.open(root.."/url.tmi")
if f then
for line in f:lines() do
if line then
- if line:find("^[%%#%-]") then -- or %W
+ if find(line,"^[%%#%-]") then -- or %W
-- skip
- elseif line:find("^zip://") then
+ elseif find(line,"^zip://") then
if trace_locating then
logs.report("fileio","mounting %s",line)
end
@@ -8996,11 +10864,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -9019,18 +10889,22 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-zip'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-local format, find = string.format, string.find
+local format, find, match = string.format, string.find, string.match
+local unpack = unpack or table.unpack
-local trace_locating, trace_verbose = false, false
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.verbose", function(v) trace_verbose = v end)
-trackers.register("resolvers.locating", function(v) trace_locating = v trace_verbose = v end)
+-- zip:///oeps.zip?name=bla/bla.tex
+-- zip:///oeps.zip?tree=tex/texmf-local
+-- zip:///texmf.zip?tree=/tex/texmf
+-- zip:///texmf.zip?tree=/tex/texmf-local
+-- zip:///texmf-mine.zip?tree=/tex/texmf-projects
zip = zip or { }
zip.archives = zip.archives or { }
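
The zip resolver addresses files inside archives with URLs of the forms listed in the comments above, for example zip:///texmf.zip?tree=/tex/texmf. A naive illustration of how such a specification decomposes into an archive path and a query table (the real code goes through resolvers.splitmethod and url.query; the pattern here is only a sketch):

    -- naive split of "zip:///archive.zip?key=value" -- illustration only
    local function splitzip(str)
        local path, query = str:match("^zip://(/[^?]+)%??(.*)$")
        local q = { }
        if query then
            for key, value in query:gmatch("([^&=]+)=([^&=]+)") do
                q[key] = value
            end
        end
        return path, q
    end

    local path, q = splitzip("zip:///texmf.zip?tree=/tex/texmf")
    print(path)    -- /texmf.zip
    print(q.tree)  -- /tex/texmf

    local path2, q2 = splitzip("zip:///oeps.zip?name=bla/bla.tex")
    print(path2)   -- /oeps.zip
    print(q2.name) -- bla/bla.tex
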
@@ -9041,9 +10915,6 @@ local locators, hashers, concatinators = resolvers.locators, resolvers.hashers,
local archives = zip.archives
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
-
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -9073,26 +10944,22 @@ function zip.closearchive(name)
end
end
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
-
function locators.zip(specification) -- where is this used? startup zips (untested)
specification = resolvers.splitmethod(specification)
local zipfile = specification.path
local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
if trace_locating then
if zfile then
- logs.report("fileio",'! zip locator, found: %s',specification.original)
+ logs.report("fileio","zip locator, archive '%s' found",specification.original)
else
- logs.report("fileio",'? zip locator, not found: %s',specification.original)
+ logs.report("fileio","zip locator, archive '%s' not found",specification.original)
end
end
end
function hashers.zip(tag,name)
- if trace_verbose then
- logs.report("fileio","loading zip file %s as %s",name,tag)
+ if trace_locating then
+ logs.report("fileio","loading zip file '%s' as '%s'",name,tag)
end
resolvers.usezipfile(format("%s?tree=%s",tag,name))
end
@@ -9117,23 +10984,25 @@ function finders.zip(specification,filetype)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio",'! zip finder, path: %s',specification.path)
+ logs.report("fileio","zip finder, archive '%s' found",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
dfile = zfile:close()
if trace_locating then
- logs.report("fileio",'+ zip finder, name: %s',q.name)
+ logs.report("fileio","zip finder, file '%s' found",q.name)
end
return specification.original
+ elseif trace_locating then
+ logs.report("fileio","zip finder, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio",'? zip finder, path %s',specification.path)
+ logs.report("fileio","zip finder, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio",'- zip finder, name: %s',filename)
+ logs.report("fileio","zip finder, '%s' not found",filename)
end
return unpack(finders.notfound)
end
@@ -9146,20 +11015,25 @@ function openers.zip(specification)
local zfile = zip.openarchive(zipspecification.path)
if zfile then
if trace_locating then
- logs.report("fileio",'+ zip starter, path: %s',zipspecification.path)
+ logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_open(specification)
+ if trace_locating then
+ logs.report("fileio","zip opener, file '%s' found",q.name)
+ end
return openers.text_opener(specification,dfile,'zip')
+ elseif trace_locating then
+ logs.report("fileio","zip opener, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio",'- zip starter, path %s',zipspecification.path)
+ logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path)
end
end
end
if trace_locating then
- logs.report("fileio",'- zip opener, name: %s',filename)
+ logs.report("fileio","zip opener, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -9172,25 +11046,27 @@ function loaders.zip(specification)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio",'+ zip starter, path: %s',specification.path)
+ logs.report("fileio","zip loader, archive '%s' opened",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_load(filename)
if trace_locating then
- logs.report("fileio",'+ zip loader, name: %s',filename)
+ logs.report("fileio","zip loader, file '%s' loaded",filename)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
+ elseif trace_locating then
+ logs.report("fileio","zip loader, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio",'- zip starter, path: %s',specification.path)
+ logs.report("fileio","zip loader, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio",'- zip loader, name: %s',filename)
+ logs.report("fileio","zip loader, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -9200,21 +11076,15 @@ end
function resolvers.usezipfile(zipname)
zipname = validzip(zipname)
- if trace_locating then
- logs.report("fileio",'! zip use, file: %s',zipname)
- end
local specification = resolvers.splitmethod(zipname)
local zipfile = specification.path
if zipfile and not zip.registeredfiles[zipname] then
local tree = url.query(specification.query).tree or ""
- if trace_locating then
- logs.report("fileio",'! zip register, file: %s',zipname)
- end
local z = zip.openarchive(zipfile)
if z then
local instance = resolvers.instance
if trace_locating then
- logs.report("fileio","= zipfile, registering: %s",zipname)
+ logs.report("fileio","zip registering, registering archive '%s'",zipname)
end
statistics.starttiming(instance)
resolvers.prepend_hash('zip',zipname,zipfile)
@@ -9223,10 +11093,10 @@ function resolvers.usezipfile(zipname)
instance.files[zipname] = resolvers.register_zip_file(z,tree or "")
statistics.stoptiming(instance)
elseif trace_locating then
- logs.report("fileio","? zipfile, unknown: %s",zipname)
+ logs.report("fileio","zip registering, unknown archive '%s'",zipname)
end
elseif trace_locating then
- logs.report("fileio",'! zip register, no file: %s',zipname)
+ logs.report("fileio","zip registering, '%s' not found",zipname)
end
end
@@ -9238,11 +11108,11 @@ function resolvers.register_zip_file(z,tree)
filter = format("^%s/(.+)/(.-)$",tree)
end
if trace_locating then
- logs.report("fileio",'= zip filter: %s',filter)
+ logs.report("fileio","zip registering, using filter '%s'",filter)
end
local register, n = resolvers.register_file, 0
for i in z:files() do
- local path, name = i.filename:match(filter)
+ local path, name = match(i.filename,filter)
if path then
if name and name ~= '' then
register(files, name, path)
@@ -9255,7 +11125,7 @@ function resolvers.register_zip_file(z,tree)
n = n + 1
end
end
- logs.report("fileio",'= zip entries: %s',n)
+ logs.report("fileio","zip registering, %s files registered",n)
return files
end
@@ -9266,12 +11136,14 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-crl'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
+local gsub = string.gsub
+
curl = curl or { }
curl.cached = { }
@@ -9280,9 +11152,9 @@ curl.cachepath = caches.definepath("curl")
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
function curl.fetch(protocol, name)
- local cachename = curl.cachepath() .. "/" .. name:gsub("[^%a%d%.]+","-")
--- cachename = cachename:gsub("[\\/]", io.fileseparator)
- cachename = cachename:gsub("[\\]", "/") -- cleanup
+ local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-")
+-- cachename = gsub(cachename,"[\\/]", io.fileseparator)
+ cachename = gsub(cachename,"[\\]", "/") -- cleanup
if not curl.cached[name] then
if not io.exists(cachename) then
curl.cached[name] = cachename
@@ -9328,6 +11200,164 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['data-lua'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- some loading stuff ... we might move this one to slot 2 depending
+-- on the developments (the loaders must not trigger kpse); we could
+-- of course use a more extensive lib path spec
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local gsub, insert = string.gsub, table.insert
+local unpack = unpack or table.unpack
+
+local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
+local clibformats = { 'lib' }
+
+local _path_, libpaths, _cpath_, clibpaths
+
+function package.libpaths()
+ if not _path_ or package.path ~= _path_ then
+ _path_ = package.path
+ libpaths = file.split_path(_path_,";")
+ end
+ return libpaths
+end
+
+function package.clibpaths()
+ if not _cpath_ or package.cpath ~= _cpath_ then
+ _cpath_ = package.cpath
+ clibpaths = file.split_path(_cpath_,";")
+ end
+ return clibpaths
+end
+
+local function thepath(...)
+ local t = { ... } t[#t+1] = "?.lua"
+ local path = file.join(unpack(t))
+ if trace_locating then
+ logs.report("fileio","! appending '%s' to 'package.path'",path)
+ end
+ return path
+end
+
+local p_libpaths, a_libpaths = { }, { }
+
+function package.append_libpath(...)
+ insert(a_libpaths,thepath(...))

+end
+
+function package.prepend_libpath(...)
+ insert(p_libpaths,1,thepath(...))
+end
+
+-- beware, we need to return a loadfile result !
+
+local function loaded(libpaths,name,simple)
+ for i=1,#libpaths do -- package.path, might become option
+ local libpath = libpaths[i]
+ local resolved = gsub(libpath,"%?",simple)
+ if trace_locating then -- more detail
+ logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
+ end
+ if resolvers.isreadable.file(resolved) then
+ if trace_locating then
+ logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved)
+ end
+ return loadfile(resolved)
+ end
+ end
+end
+
+
+package.loaders[2] = function(name) -- was [#package.loaders+1]
+ if trace_locating then -- mode detail
+ logs.report("fileio","! locating '%s'",name)
+ end
+ for i=1,#libformats do
+ local format = libformats[i]
+ local resolved = resolvers.find_file(name,format) or ""
+ if trace_locating then -- mode detail
+ logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format)
+ end
+ if resolved ~= "" then
+ if trace_locating then
+ logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved)
+ end
+ return loadfile(resolved)
+ end
+ end
+ -- libpaths
+ local libpaths, clibpaths = package.libpaths(), package.clibpaths()
+ local simple = gsub(name,"%.lua$","")
+ local simple = gsub(simple,"%.","/")
+ local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
+ if resolved then
+ return resolved
+ end
+ --
+ local libname = file.addsuffix(simple,os.libsuffix)
+ for i=1,#clibformats do
+ -- better have a dedicated loop
+ local format = clibformats[i]
+ local paths = resolvers.expanded_path_list_from_var(format)
+ for p=1,#paths do
+ local path = paths[p]
+ local resolved = file.join(path,libname)
+ if trace_locating then -- more detail
+ logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path)
+ end
+ if resolvers.isreadable.file(resolved) then
+ if trace_locating then
+ logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved)
+ end
+ return package.loadlib(resolved,name)
+ end
+ end
+ end
+ for i=1,#clibpaths do -- package.cpath, might become option
+ local libpath = clibpaths[i]
+ local resolved = gsub(libpath,"?",simple)
+ if trace_locating then -- more detail
+ logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+ end
+ if resolvers.isreadable.file(resolved) then
+ if trace_locating then
+ logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ end
+ return package.loadlib(resolved,name)
+ end
+ end
+ -- just in case the distribution is messed up
+ if trace_locating then -- more detail
+ logs.report("fileio","! checking for '%s' using 'luatexlibs'",name)
+ end
+ local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or ""
+ if resolved ~= "" then
+ if trace_locating then
+ logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved)
+ end
+ return loadfile(resolved)
+ end
+ if trace_locating then
+ logs.report("fileio",'? unable to locate lib: %s',name)
+ end
+-- return "unable to locate " .. name
+end
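+
+-- summary of the order implemented above: the tex tree (libformats via
+-- resolvers.find_file), then package.path (prepended, regular and appended
+-- lib paths), then the clib formats plus package.cpath, and as a last resort
+-- a basename lookup in 'luatexlibs'; so a plain require of some module name
+-- (hypothetical) transparently goes through these steps:
+
+--~ require("somelib")   -- a lua lib ends up in loadfile, a clib in package.loadlib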
+
+resolvers.loadlualib = require
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['luat-kps'] = {
version = 1.001,
comment = "companion to luatools.lua",
@@ -9437,7 +11467,7 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-aux'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -9445,47 +11475,47 @@ if not modules then modules = { } end modules ['data-aux'] = {
local find = string.find
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end)
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.clean_path(oldname)
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","to be replaced old script %s", oldscript)
end
local newscripts = resolvers.find_files(newname) or { }
if #newscripts == 0 then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","unable to locate new script")
end
else
for i=1,#newscripts do
local newscript = resolvers.clean_path(newscripts[i])
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","checking new script %s", newscript)
end
if oldscript == newscript then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","old and new script are the same")
end
elseif not find(newscript,scriptpath) then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","new script should come from %s",scriptpath)
end
elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","invalid new script name")
end
else
local newdata = io.loaddata(newscript)
if newdata then
- if trace_verbose then
+ if trace_locating then
logs.report("fileio","old script content replaced by new content")
end
io.savedata(oldscript,newdata)
break
- elseif trace_verbose then
+ elseif trace_locating then
logs.report("fileio","unable to load new script")
end
end
@@ -9500,25 +11530,28 @@ do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-tmf'] = {
version = 1.001,
- comment = "companion to luat-lib.tex",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
+local find, gsub, match = string.find, string.gsub, string.match
+local getenv, setenv = os.getenv, os.setenv
+
-- loads *.tmf files in minimal tree roots (to be optimized and documented)
function resolvers.check_environment(tree)
logs.simpleline()
- os.setenv('TMP', os.getenv('TMP') or os.getenv('TEMP') or os.getenv('TMPDIR') or os.getenv('HOME'))
- os.setenv('TEXOS', os.getenv('TEXOS') or ("texmf-" .. os.currentplatform()))
- os.setenv('TEXPATH', (tree or "tex"):gsub("\/+$",''))
- os.setenv('TEXMFOS', os.getenv('TEXPATH') .. "/" .. os.getenv('TEXOS'))
+ setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME'))
+ setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. os.platform))
+ setenv('TEXPATH', gsub(tree or "tex","\/+$",''))
+ setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS'))
logs.simpleline()
- logs.simple("preset : TEXPATH => %s", os.getenv('TEXPATH'))
- logs.simple("preset : TEXOS => %s", os.getenv('TEXOS'))
- logs.simple("preset : TEXMFOS => %s", os.getenv('TEXMFOS'))
- logs.simple("preset : TMP => %s", os.getenv('TMP'))
+ logs.simple("preset : TEXPATH => %s", getenv('TEXPATH'))
+ logs.simple("preset : TEXOS => %s", getenv('TEXOS'))
+ logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS'))
+ logs.simple("preset : TMP => %s", getenv('TMP'))
logs.simple('')
end
@@ -9526,27 +11559,27 @@ function resolvers.load_environment(name) -- todo: key=value as well as lua
local f = io.open(name)
if f then
for line in f:lines() do
- if line:find("^[%%%#]") then
+ if find(line,"^[%%%#]") then
-- skip comment
else
- local key, how, value = line:match("^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
+ local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
if how then
- value = value:gsub("%%(.-)%%", function(v) return os.getenv(v) or "" end)
+ value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end)
if how == "=" or how == "<<" then
- os.setenv(key,value)
+ setenv(key,value)
elseif how == "?" or how == "??" then
- os.setenv(key,os.getenv(key) or value)
+ setenv(key,getenv(key) or value)
elseif how == "<" or how == "+=" then
- if os.getenv(key) then
- os.setenv(key,os.getenv(key) .. io.fileseparator .. value)
+ if getenv(key) then
+ setenv(key,getenv(key) .. io.fileseparator .. value)
else
- os.setenv(key,value)
+ setenv(key,value)
end
elseif how == ">" or how == "=+" then
- if os.getenv(key) then
- os.setenv(key,value .. io.pathseparator .. os.getenv(key))
+ if getenv(key) then
+ setenv(key,value .. io.pathseparator .. getenv(key))
else
- os.setenv(key,value)
+ setenv(key,value)
end
end
end
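+
+-- sketch of the accepted line format (keys and values are hypothetical); a
+-- '%VAR%' in the value is replaced by the corresponding environment variable:
+
+--~ FOO = bar        -- plain assignment ('=' or '<<')
+--~ FOO ? bar        -- only set when FOO is not set yet ('?' or '??')
+--~ FOO < bar        -- append with io.fileseparator ('<' or '+=')
+--~ FOO > bar        -- prepend with io.pathseparator ('>' or '=+')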
@@ -9585,6 +11618,9 @@ if not modules then modules = { } end modules ['luat-sta'] = {
-- this code is used in the updater
+local gmatch, match = string.gmatch, string.match
+local type = type
+
states = states or { }
states.data = states.data or { }
states.hash = states.hash or { }
@@ -9613,13 +11649,17 @@ function states.set_by_tag(tag,key,value,default,persistent)
if d then
if type(d) == "table" then
local dkey, hkey = key, key
- local pre, post = key:match("(.+)%.([^%.]+)$")
+ local pre, post = match(key,"(.+)%.([^%.]+)$")
if pre and post then
- for k in pre:gmatch("[^%.]+") do
+ for k in gmatch(pre,"[^%.]+") do
local dk = d[k]
if not dk then
dk = { }
d[k] = dk
+ elseif type(dk) == "string" then
+ -- invalid table, unable to upgrade structure
+ -- hope for the best or delete the state file
+ break
end
d = dk
end
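+ -- so a key like 'a.b.c' (hypothetical) walks and, if needed, creates
+ -- d['a']['b'] and then operates on the final component 'c'; a string
+ -- found along the way signals an old, incompatible state file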
@@ -9647,7 +11687,7 @@ function states.get_by_tag(tag,key,default)
else
local d = states.data[tag]
if d then
- for k in key:gmatch("[^%.]+") do
+ for k in gmatch(key,"[^%.]+") do
local dk = d[k]
if dk then
d = dk
@@ -9782,6 +11822,7 @@ own.libs = { -- todo: check which ones are really needed
'l-os.lua',
'l-file.lua',
'l-md5.lua',
+ 'l-url.lua',
'l-dir.lua',
'l-boolean.lua',
'l-math.lua',
@@ -9790,11 +11831,13 @@ own.libs = { -- todo: check which ones are really needed
'l-utils.lua',
'l-aux.lua',
-- 'l-xml.lua',
+ 'trac-tra.lua',
'lxml-tab.lua',
- 'lxml-pth.lua',
- 'lxml-ent.lua',
+ 'lxml-lpt.lua',
+-- 'lxml-ent.lua',
'lxml-mis.lua',
- 'trac-tra.lua',
+ 'lxml-aux.lua',
+ 'lxml-xml.lua',
'luat-env.lua',
'trac-inf.lua',
'trac-log.lua',
@@ -9809,7 +11852,7 @@ own.libs = { -- todo: check which ones are really needed
-- 'data-bin.lua',
'data-zip.lua',
'data-crl.lua',
--- 'data-lua.lua',
+ 'data-lua.lua',
'data-kps.lua', -- so that we can replace kpsewhich
'data-aux.lua', -- updater
'data-tmf.lua', -- tree files
@@ -9827,7 +11870,8 @@ end
-- End of hack.
-own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua'
+own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua'
+
own.path = string.match(own.name,"^(.+)[\\/].-$") or "."
own.list = { '.' }
@@ -9865,18 +11909,25 @@ if not resolvers then
os.exit()
end
-logs.setprogram('MTXrun',"TDS Runner Tool 1.22",environment.arguments["verbose"] or false)
+logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false)
local instance = resolvers.reset()
+local trackspec = environment.argument("trackers") or environment.argument("track")
+
+if trackspec then
+ trackers.enable(trackspec)
+end
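+
+-- so tracing can be enabled from the command line, e.g. (hypothetical run):
+
+--~ mtxrun --trackers=resolvers.locating --script context somefile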
+
runners = runners or { } -- global
messages = messages or { }
messages.help = [[
---script run an mtx script (--noquotes)
---execute run a script or program (--noquotes)
+--script run an mtx script (lua preferred method) (--noquotes), no script gives list
+--execute run a script or program (texmfstart method) (--noquotes)
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
+--internal run script using built in libraries (same as --ctxlua)
--locate locate given filename
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
@@ -9893,16 +11944,20 @@ messages.help = [[
--unix create unix (linux) stubs
--verbose give a bit more info
+--trackers=list enable given trackers
--engine=str target engine
--progname=str format or backend
--edit launch editor with found file
--launch (--all) launch files like manuals, assumes os support
---intern run script using built in libraries
+--timedrun run a script and time its run
+--autogenerate regenerate databases if needed (handy when used to run context in an editor)
+
+--usekpse use kpse as fallback (when no mkiv and cache installed, often slower)
+--forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
---usekpse use kpse as fallback (when no mkiv and cache installed, often slower)
---forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
+--prefixes show supported prefixes
]]
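+-- some typical invocations (the file names are hypothetical):
+
+--~ mtxrun --script context somefile.tex
+--~ mtxrun --locate context.mkiv
+--~ mtxrun --prefixes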
runners.applications = {
@@ -9918,20 +11973,17 @@ runners.suffixes = {
}
runners.registered = {
- texexec = { 'texexec.rb', true }, -- context mkii runner (only tool not to be luafied)
+ texexec = { 'texexec.rb', false }, -- context mkii runner (only tool not to be luafied)
texutil = { 'texutil.rb', true }, -- old perl based index sorter for mkii (old versions need it)
texfont = { 'texfont.pl', true }, -- perl script that makes mkii font metric files
texfind = { 'texfind.pl', false }, -- perltk based tex searching tool, mostly used at pragma
texshow = { 'texshow.pl', false }, -- perltk based context help system, will be luafied
- -- texwork = { \texwork.pl', false }, -- perltk based editing environment, only used at pragma
-
+ -- texwork = { 'texwork.pl', false }, -- perltk based editing environment, only used at pragma
makempy = { 'makempy.pl', true },
mptopdf = { 'mptopdf.pl', true },
pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced)
-
-- examplex = { 'examplex.rb', false },
concheck = { 'concheck.rb', false },
-
runtools = { 'runtools.rb', true },
textools = { 'textools.rb', true },
tmftools = { 'tmftools.rb', true },
@@ -9943,7 +11995,6 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
-
pdftrimwhite = { 'pdftrimwhite.pl', false }
}
@@ -9952,6 +12003,13 @@ runners.launchers = {
unix = { }
}
+-- like runners.libpath("framework"): looks on script's subpath
+
+function runners.libpath(...)
+ package.prepend_libpath(file.dirname(environment.ownscript),...)
+ package.prepend_libpath(file.dirname(environment.ownname) ,...)
+end
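+
+-- so when the current script is e.g. /some/path/mtx-foo.lua (hypothetical),
+-- runners.libpath("framework") prepends '/some/path/framework/?.lua' to the
+-- lua search path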
+
function runners.prepare()
local checkname = environment.argument("ifchanged")
if checkname and checkname ~= "" then
@@ -9996,7 +12054,7 @@ function runners.prepare()
return "run"
end
-function runners.execute_script(fullname,internal)
+function runners.execute_script(fullname,internal,nosplit)
local noquote = environment.argument("noquotes")
if fullname and fullname ~= "" then
local state = runners.prepare()
@@ -10036,17 +12094,20 @@ function runners.execute_script(fullname,internal)
end
end
if result and result ~= "" then
- local before, after = environment.split_arguments(fullname) -- already done
- environment.arguments_before, environment.arguments_after = before, after
+ if not nosplit then
+ local before, after = environment.split_arguments(fullname) -- already done
+ environment.arguments_before, environment.arguments_after = before, after
+ end
if internal then
- arg = { } for _,v in pairs(after) do arg[#arg+1] = v end
+ arg = { } for _,v in pairs(environment.arguments_after) do arg[#arg+1] = v end
+ environment.ownscript = result
dofile(result)
else
local binary = runners.applications[file.extname(result)]
if binary and binary ~= "" then
result = binary .. " " .. result
end
- local command = result .. " " .. environment.reconstruct_commandline(after,noquote)
+ local command = result .. " " .. environment.reconstruct_commandline(environment.arguments_after,noquote)
if logs.verbose then
logs.simpleline()
logs.simple("executing: %s",command)
@@ -10054,8 +12115,24 @@ function runners.execute_script(fullname,internal)
logs.simpleline()
io.flush()
end
- local code = os.exec(command) -- maybe spawn
- return code == 0
+ -- no os.exec because otherwise we get the wrong return value
+ local code = os.execute(command) -- maybe spawn
+ if code == 0 then
+ return true
+ else
+ if binary then
+ binary = file.addsuffix(binary,os.binsuffix)
+ for p in string.gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ if lfs.isfile(file.join(p,binary)) then
+ return false
+ end
+ end
+ logs.simpleline()
+ logs.simple("This script needs '%s' which seems not to be installed.",binary)
+ logs.simpleline()
+ end
+ return false
+ end
end
end
end
@@ -10088,7 +12165,7 @@ function runners.execute_program(fullname)
return false
end
--- the --usekpse flag will fallback on kpse
+-- the --usekpse flag will fall back on kpse (hm, we'd better update mtx-stubs)
local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
@@ -10143,7 +12220,7 @@ function runners.locate_file(filename)
end
function runners.locate_platform()
- runners.report_location(os.currentplatform())
+ runners.report_location(os.platform)
end
function runners.report_location(result)
@@ -10176,7 +12253,8 @@ end
function runners.save_script_session(filename, list)
local t = { }
- for _, key in ipairs(list) do
+ for i=1,#list do
+ local key = list[i]
t[key] = environment.arguments[key]
end
io.savedata(filename,table.serialize(t,true))
@@ -10265,20 +12343,22 @@ function runners.find_mtx_script(filename)
if fullname and fullname ~= "" then
return fullname
end
+ -- mtx- prefix checking
+ local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
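+ -- e.g. '--script fonts' also tries 'mtx-fonts.lua', 'mtx-fontss.lua' and
+ -- finally 'mtx-font.lua', while a name that already has the 'mtx-' prefix
+ -- is not prefixed again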
-- context namespace, mtx-<filename>
- fullname = "mtx-" .. filename
+ fullname = mtxprefix .. filename
fullname = found(fullname) or resolvers.find_file(fullname)
if fullname and fullname ~= "" then
return fullname
end
-- context namespace, mtx-<filename>s
- fullname = "mtx-" .. basename .. "s" .. "." .. suffix
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
fullname = found(fullname) or resolvers.find_file(fullname)
if fullname and fullname ~= "" then
return fullname
end
-- context namespace, mtx-<filename minus trailing s>
- fullname = "mtx-" .. basename:gsub("s$","") .. "." .. suffix
+ fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
fullname = found(fullname) or resolvers.find_file(fullname)
if fullname and fullname ~= "" then
return fullname
@@ -10288,9 +12368,17 @@ function runners.find_mtx_script(filename)
return fullname
end
-function runners.execute_ctx_script(filename,arguments)
+function runners.execute_ctx_script(filename)
+ local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
- -- retyr after generate but only if --autogenerate
+ if file.extname(fullname) == "cld" then
+ -- handy in editors where we force --autopdf
+ logs.simple("running cld script: %s",filename)
+ table.insert(arguments,1,fullname)
+ table.insert(arguments,"--autopdf")
+ fullname = runners.find_mtx_script("context") or ""
+ end
+ -- retry after generate but only if --autogenerate
if fullname == "" and environment.argument("autogenerate") then -- might become the default
instance.renewcache = true
logs.setverbose(true)
@@ -10319,32 +12407,51 @@ function runners.execute_ctx_script(filename,arguments)
if logs.verbose then
logs.simple("using script: %s\n",fullname)
end
+ environment.ownscript = fullname
dofile(fullname)
local savename = environment.arguments['save']
- if savename and runners.save_list and not table.is_empty(runners.save_list or { }) then
- if type(savename) ~= "string" then savename = file.basename(fullname) end
- savename = file.replacesuffix(savename,"cfg")
- runners.save_script_session(savename, runners.save_list)
+ if savename then
+ local save_list = runners.save_list
+ if save_list and next(save_list) then
+ if type(savename) ~= "string" then savename = file.basename(fullname) end
+ savename = file.replacesuffix(savename,"cfg")
+ runners.save_script_session(savename,save_list)
+ end
end
return true
end
else
- logs.setverbose(true)
- if filename == "" then
- logs.simple("unknown script, no name given")
+ -- logs.setverbose(true)
+ if filename == "" or filename == "help" then
local context = resolvers.find_file("mtx-context.lua")
+ logs.setverbose(true)
if context ~= "" then
local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed
local valid = { }
- for _, scriptname in ipairs(result) do
- scriptname = string.match(scriptname,".*mtx%-([^%-]-)%.lua")
- if scriptname then
- valid[#valid+1] = scriptname
+ table.sort(result)
+ for i=1,#result do
+ local scriptname = result[i]
+ local scriptbase = string.match(scriptname,".*mtx%-([^%-]-)%.lua")
+ if scriptbase then
+ local data = io.loaddata(scriptname)
+ local banner, version = string.match(data,"[\n\r]logs%.extendbanner%s*%(%s*[\"\']([^\n\r]+)%s*(%d+%.%d+)")
+ if banner then
+ valid[#valid+1] = { scriptbase, version, banner }
+ end
end
end
if #valid > 0 then
- logs.simple("known scripts: %s",table.concat(valid,", "))
+ logs.reportbanner()
+ logs.reportline()
+ logs.simple("no script name given, known scripts:")
+ logs.simple()
+ for k=1,#valid do
+ local v = valid[k]
+ logs.simple("%-12s %4s %s",v[1],v[2],v[3])
+ end
end
+ else
+ logs.simple("no script name given")
end
else
filename = file.addsuffix(filename,"lua")
@@ -10358,6 +12465,12 @@ function runners.execute_ctx_script(filename,arguments)
end
end
+function runners.prefixes()
+ logs.reportbanner()
+ logs.reportline()
+ logs.simple(table.concat(resolvers.allprefixes(true)," "))
+end
+
function runners.timedrun(filename) -- just for me
if filename and filename ~= "" then
runners.timed(function() os.execute(filename) end)
@@ -10385,7 +12498,9 @@ instance.lsrmode = environment.argument("lsr") or false
-- maybe the unset has to go to this level
-if environment.argument("usekpse") or environment.argument("forcekpse") then
+local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
+
+if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
os.setenv("engine","")
os.setenv("progname","")
@@ -10420,7 +12535,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") then
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") then
+ elseif environment.argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -10449,7 +12564,6 @@ else
end
-
if environment.argument("selfmerge") then
-- embed used libraries
utils.merger.selfmerge(own.name,own.libs,own.list)
@@ -10462,9 +12576,14 @@ elseif environment.argument("selfupdate") then
elseif environment.argument("ctxlua") or environment.argument("internal") then
-- run a script by loading it (using libs)
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("s") then
+elseif environment.argument("script") or environment.argument("scripts") then
-- run a script by loading it (using libs), pass args
- ok = runners.execute_ctx_script(filename,after)
+ if is_mkii_stub then
+ -- execute mkii script
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
elseif environment.argument("execute") then
-- execute script
ok = runners.execute_script(filename)
@@ -10491,6 +12610,8 @@ elseif environment.argument("locate") then
elseif environment.argument("platform")then
-- locate platform
runners.locate_platform()
+elseif environment.argument("prefixes") then
+ runners.prefixes()
elseif environment.argument("timedrun") then
 -- run and time the given script or program
runners.timedrun(filename)
@@ -10499,8 +12620,14 @@ elseif environment.argument("help") or filename=='help' or filename == "" then
-- execute script
elseif filename:find("^bin:") then
ok = runners.execute_program(filename)
+elseif is_mkii_stub then
+ -- execute mkii script
+ ok = runners.execute_script(filename,false,true)
else
- ok = runners.execute_script(filename)
+ ok = runners.execute_ctx_script(filename)
+ if not ok then
+ ok = runners.execute_script(filename)
+ end
end
if os.platform == "unix" then